From cdb57d17358d7697ec9d8a67cd236d0280ded648 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 21 Jun 2021 23:57:48 +0000 Subject: [PATCH 1/4] chore: remove all monolith Bazel deps chore: release gapic-generator-csharp v1.3.7 chore: release gapic-generator-go 0.20.5 chore: release gapic-generator-java 1.0.14 chore: release gapic-generator-php 1.0.1 chore: release gapic-generator-python 0.50.0 chore: update gapic-generator-ruby to the latest commit chore: release gapic-generator-typescript 1.5.0 Committer: @miraleung PiperOrigin-RevId: 380641501 Source-Link: https://github.com/googleapis/googleapis/commit/076f7e9f0b258bdb54338895d7251b202e8f0de3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/27e4c88b4048e5f56508d4e1aa417d60a3380892 --- owl-bot-staging/v2/.coveragerc | 17 + owl-bot-staging/v2/MANIFEST.in | 2 + owl-bot-staging/v2/README.rst | 49 + owl-bot-staging/v2/docs/conf.py | 376 + owl-bot-staging/v2/docs/index.rst | 7 + .../v2/docs/tasks_v2/cloud_tasks.rst | 10 + owl-bot-staging/v2/docs/tasks_v2/services.rst | 6 + owl-bot-staging/v2/docs/tasks_v2/types.rst | 7 + .../v2/google/cloud/tasks/__init__.py | 77 + .../v2/google/cloud/tasks/py.typed | 2 + .../v2/google/cloud/tasks_v2/__init__.py | 78 + .../google/cloud/tasks_v2/gapic_metadata.json | 183 + .../v2/google/cloud/tasks_v2/py.typed | 2 + .../cloud/tasks_v2/services/__init__.py | 15 + .../tasks_v2/services/cloud_tasks/__init__.py | 22 + .../services/cloud_tasks/async_client.py | 1801 +++++ .../tasks_v2/services/cloud_tasks/client.py | 1940 ++++++ .../tasks_v2/services/cloud_tasks/pagers.py | 264 + .../cloud_tasks/transports/__init__.py | 33 + .../services/cloud_tasks/transports/base.py | 441 ++ .../services/cloud_tasks/transports/grpc.py | 779 +++ .../cloud_tasks/transports/grpc_asyncio.py | 783 +++ .../google/cloud/tasks_v2/types/__init__.py | 80 + .../google/cloud/tasks_v2/types/cloudtasks.py | 558 ++ .../v2/google/cloud/tasks_v2/types/queue.py | 434 ++ 
.../v2/google/cloud/tasks_v2/types/target.py | 548 ++ .../v2/google/cloud/tasks_v2/types/task.py | 262 + owl-bot-staging/v2/mypy.ini | 3 + owl-bot-staging/v2/noxfile.py | 132 + .../v2/scripts/fixup_tasks_v2_keywords.py | 191 + owl-bot-staging/v2/setup.py | 53 + owl-bot-staging/v2/tests/__init__.py | 16 + owl-bot-staging/v2/tests/unit/__init__.py | 16 + .../v2/tests/unit/gapic/__init__.py | 16 + .../v2/tests/unit/gapic/tasks_v2/__init__.py | 16 + .../unit/gapic/tasks_v2/test_cloud_tasks.py | 5181 ++++++++++++++ owl-bot-staging/v2beta2/.coveragerc | 17 + owl-bot-staging/v2beta2/MANIFEST.in | 2 + owl-bot-staging/v2beta2/README.rst | 49 + owl-bot-staging/v2beta2/docs/conf.py | 376 + owl-bot-staging/v2beta2/docs/index.rst | 7 + .../docs/tasks_v2beta2/cloud_tasks.rst | 10 + .../v2beta2/docs/tasks_v2beta2/services.rst | 6 + .../v2beta2/docs/tasks_v2beta2/types.rst | 7 + .../v2beta2/google/cloud/tasks/__init__.py | 89 + .../v2beta2/google/cloud/tasks/py.typed | 2 + .../google/cloud/tasks_v2beta2/__init__.py | 90 + .../cloud/tasks_v2beta2/gapic_metadata.json | 223 + .../google/cloud/tasks_v2beta2/py.typed | 2 + .../cloud/tasks_v2beta2/services/__init__.py | 15 + .../services/cloud_tasks/__init__.py | 22 + .../services/cloud_tasks/async_client.py | 2249 ++++++ .../services/cloud_tasks/client.py | 2388 +++++++ .../services/cloud_tasks/pagers.py | 264 + .../cloud_tasks/transports/__init__.py | 33 + .../services/cloud_tasks/transports/base.py | 497 ++ .../services/cloud_tasks/transports/grpc.py | 942 +++ .../cloud_tasks/transports/grpc_asyncio.py | 946 +++ .../cloud/tasks_v2beta2/types/__init__.py | 92 + .../cloud/tasks_v2beta2/types/cloudtasks.py | 869 +++ .../google/cloud/tasks_v2beta2/types/queue.py | 530 ++ .../cloud/tasks_v2beta2/types/target.py | 487 ++ .../google/cloud/tasks_v2beta2/types/task.py | 254 + owl-bot-staging/v2beta2/mypy.ini | 3 + owl-bot-staging/v2beta2/noxfile.py | 132 + .../scripts/fixup_tasks_v2beta2_keywords.py | 195 + owl-bot-staging/v2beta2/setup.py | 
53 + owl-bot-staging/v2beta2/tests/__init__.py | 16 + .../v2beta2/tests/unit/__init__.py | 16 + .../v2beta2/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/tasks_v2beta2/__init__.py | 16 + .../gapic/tasks_v2beta2/test_cloud_tasks.py | 6121 +++++++++++++++++ owl-bot-staging/v2beta3/.coveragerc | 17 + owl-bot-staging/v2beta3/MANIFEST.in | 2 + owl-bot-staging/v2beta3/README.rst | 49 + owl-bot-staging/v2beta3/docs/conf.py | 376 + owl-bot-staging/v2beta3/docs/index.rst | 7 + .../docs/tasks_v2beta3/cloud_tasks.rst | 10 + .../v2beta3/docs/tasks_v2beta3/services.rst | 6 + .../v2beta3/docs/tasks_v2beta3/types.rst | 7 + .../v2beta3/google/cloud/tasks/__init__.py | 83 + .../v2beta3/google/cloud/tasks/py.typed | 2 + .../google/cloud/tasks_v2beta3/__init__.py | 84 + .../cloud/tasks_v2beta3/gapic_metadata.json | 183 + .../google/cloud/tasks_v2beta3/py.typed | 2 + .../cloud/tasks_v2beta3/services/__init__.py | 15 + .../services/cloud_tasks/__init__.py | 22 + .../services/cloud_tasks/async_client.py | 1803 +++++ .../services/cloud_tasks/client.py | 1942 ++++++ .../services/cloud_tasks/pagers.py | 264 + .../cloud_tasks/transports/__init__.py | 33 + .../services/cloud_tasks/transports/base.py | 441 ++ .../services/cloud_tasks/transports/grpc.py | 780 +++ .../cloud_tasks/transports/grpc_asyncio.py | 784 +++ .../cloud/tasks_v2beta3/types/__init__.py | 86 + .../cloud/tasks_v2beta3/types/cloudtasks.py | 579 ++ .../google/cloud/tasks_v2beta3/types/queue.py | 556 ++ .../cloud/tasks_v2beta3/types/target.py | 620 ++ .../google/cloud/tasks_v2beta3/types/task.py | 280 + owl-bot-staging/v2beta3/mypy.ini | 3 + owl-bot-staging/v2beta3/noxfile.py | 132 + .../scripts/fixup_tasks_v2beta3_keywords.py | 191 + owl-bot-staging/v2beta3/setup.py | 53 + owl-bot-staging/v2beta3/tests/__init__.py | 16 + .../v2beta3/tests/unit/__init__.py | 16 + .../v2beta3/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/tasks_v2beta3/__init__.py | 16 + .../gapic/tasks_v2beta3/test_cloud_tasks.py | 5211 ++++++++++++++ 
108 files changed, 46123 insertions(+) create mode 100644 owl-bot-staging/v2/.coveragerc create mode 100644 owl-bot-staging/v2/MANIFEST.in create mode 100644 owl-bot-staging/v2/README.rst create mode 100644 owl-bot-staging/v2/docs/conf.py create mode 100644 owl-bot-staging/v2/docs/index.rst create mode 100644 owl-bot-staging/v2/docs/tasks_v2/cloud_tasks.rst create mode 100644 owl-bot-staging/v2/docs/tasks_v2/services.rst create mode 100644 owl-bot-staging/v2/docs/tasks_v2/types.rst create mode 100644 owl-bot-staging/v2/google/cloud/tasks/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/async_client.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/client.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/pagers.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/types/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/types/cloudtasks.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/types/queue.py create mode 100644 
owl-bot-staging/v2/google/cloud/tasks_v2/types/target.py create mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/types/task.py create mode 100644 owl-bot-staging/v2/mypy.ini create mode 100644 owl-bot-staging/v2/noxfile.py create mode 100644 owl-bot-staging/v2/scripts/fixup_tasks_v2_keywords.py create mode 100644 owl-bot-staging/v2/setup.py create mode 100644 owl-bot-staging/v2/tests/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/tasks_v2/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/tasks_v2/test_cloud_tasks.py create mode 100644 owl-bot-staging/v2beta2/.coveragerc create mode 100644 owl-bot-staging/v2beta2/MANIFEST.in create mode 100644 owl-bot-staging/v2beta2/README.rst create mode 100644 owl-bot-staging/v2beta2/docs/conf.py create mode 100644 owl-bot-staging/v2beta2/docs/index.rst create mode 100644 owl-bot-staging/v2beta2/docs/tasks_v2beta2/cloud_tasks.rst create mode 100644 owl-bot-staging/v2beta2/docs/tasks_v2beta2/services.rst create mode 100644 owl-bot-staging/v2beta2/docs/tasks_v2beta2/types.rst create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks/__init__.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks/py.typed create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/__init__.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/gapic_metadata.json create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/py.typed create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/__init__.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py create 
mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/__init__.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/cloudtasks.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/queue.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/target.py create mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/task.py create mode 100644 owl-bot-staging/v2beta2/mypy.ini create mode 100644 owl-bot-staging/v2beta2/noxfile.py create mode 100644 owl-bot-staging/v2beta2/scripts/fixup_tasks_v2beta2_keywords.py create mode 100644 owl-bot-staging/v2beta2/setup.py create mode 100644 owl-bot-staging/v2beta2/tests/__init__.py create mode 100644 owl-bot-staging/v2beta2/tests/unit/__init__.py create mode 100644 owl-bot-staging/v2beta2/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/__init__.py create mode 100644 owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py create mode 100644 owl-bot-staging/v2beta3/.coveragerc create mode 100644 owl-bot-staging/v2beta3/MANIFEST.in create mode 100644 owl-bot-staging/v2beta3/README.rst create mode 100644 owl-bot-staging/v2beta3/docs/conf.py create mode 100644 owl-bot-staging/v2beta3/docs/index.rst create mode 100644 owl-bot-staging/v2beta3/docs/tasks_v2beta3/cloud_tasks.rst create mode 100644 
owl-bot-staging/v2beta3/docs/tasks_v2beta3/services.rst create mode 100644 owl-bot-staging/v2beta3/docs/tasks_v2beta3/types.rst create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks/__init__.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks/py.typed create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/__init__.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/gapic_metadata.json create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/py.typed create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/__init__.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/__init__.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/cloudtasks.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/queue.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/target.py create mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/task.py create mode 100644 owl-bot-staging/v2beta3/mypy.ini create mode 100644 
owl-bot-staging/v2beta3/noxfile.py create mode 100644 owl-bot-staging/v2beta3/scripts/fixup_tasks_v2beta3_keywords.py create mode 100644 owl-bot-staging/v2beta3/setup.py create mode 100644 owl-bot-staging/v2beta3/tests/__init__.py create mode 100644 owl-bot-staging/v2beta3/tests/unit/__init__.py create mode 100644 owl-bot-staging/v2beta3/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/__init__.py create mode 100644 owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc new file mode 100644 index 00000000..1d5bc53f --- /dev/null +++ b/owl-bot-staging/v2/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/tasks/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in new file mode 100644 index 00000000..4fdb4f57 --- /dev/null +++ b/owl-bot-staging/v2/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/tasks *.py +recursive-include google/cloud/tasks_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst new file mode 100644 index 00000000..6171a7e2 --- /dev/null +++ b/owl-bot-staging/v2/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Tasks API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. 
Enable the Google Cloud Tasks API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py new file mode 100644 index 00000000..62c563cc --- /dev/null +++ b/owl-bot-staging/v2/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# +# google-cloud-tasks documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. 
+project = u"google-cloud-tasks" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-cloud-tasks-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-tasks.tex", + u"google-cloud-tasks Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + master_doc, + "google-cloud-tasks", + u"Google Cloud Tasks Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-tasks", + u"google-cloud-tasks Documentation", + author, + "google-cloud-tasks", + "GAPIC library for Google Cloud Tasks API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst new file mode 100644 index 00000000..f3d07358 --- /dev/null +++ b/owl-bot-staging/v2/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + tasks_v2/services + tasks_v2/types diff --git a/owl-bot-staging/v2/docs/tasks_v2/cloud_tasks.rst b/owl-bot-staging/v2/docs/tasks_v2/cloud_tasks.rst new file mode 100644 index 00000000..11481d7c --- /dev/null +++ b/owl-bot-staging/v2/docs/tasks_v2/cloud_tasks.rst @@ -0,0 +1,10 @@ +CloudTasks +---------------------------- + +.. automodule:: google.cloud.tasks_v2.services.cloud_tasks + :members: + :inherited-members: + +.. 
automodule:: google.cloud.tasks_v2.services.cloud_tasks.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v2/docs/tasks_v2/services.rst b/owl-bot-staging/v2/docs/tasks_v2/services.rst new file mode 100644 index 00000000..f24b73b1 --- /dev/null +++ b/owl-bot-staging/v2/docs/tasks_v2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Tasks v2 API +====================================== +.. toctree:: + :maxdepth: 2 + + cloud_tasks diff --git a/owl-bot-staging/v2/docs/tasks_v2/types.rst b/owl-bot-staging/v2/docs/tasks_v2/types.rst new file mode 100644 index 00000000..7a2a9a4b --- /dev/null +++ b/owl-bot-staging/v2/docs/tasks_v2/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Tasks v2 API +=================================== + +.. automodule:: google.cloud.tasks_v2.types + :members: + :undoc-members: + :show-inheritance: diff --git a/owl-bot-staging/v2/google/cloud/tasks/__init__.py b/owl-bot-staging/v2/google/cloud/tasks/__init__.py new file mode 100644 index 00000000..054d56f6 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks/__init__.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.cloud.tasks_v2.services.cloud_tasks.client import CloudTasksClient +from google.cloud.tasks_v2.services.cloud_tasks.async_client import CloudTasksAsyncClient + +from google.cloud.tasks_v2.types.cloudtasks import CreateQueueRequest +from google.cloud.tasks_v2.types.cloudtasks import CreateTaskRequest +from google.cloud.tasks_v2.types.cloudtasks import DeleteQueueRequest +from google.cloud.tasks_v2.types.cloudtasks import DeleteTaskRequest +from google.cloud.tasks_v2.types.cloudtasks import GetQueueRequest +from google.cloud.tasks_v2.types.cloudtasks import GetTaskRequest +from google.cloud.tasks_v2.types.cloudtasks import ListQueuesRequest +from google.cloud.tasks_v2.types.cloudtasks import ListQueuesResponse +from google.cloud.tasks_v2.types.cloudtasks import ListTasksRequest +from google.cloud.tasks_v2.types.cloudtasks import ListTasksResponse +from google.cloud.tasks_v2.types.cloudtasks import PauseQueueRequest +from google.cloud.tasks_v2.types.cloudtasks import PurgeQueueRequest +from google.cloud.tasks_v2.types.cloudtasks import ResumeQueueRequest +from google.cloud.tasks_v2.types.cloudtasks import RunTaskRequest +from google.cloud.tasks_v2.types.cloudtasks import UpdateQueueRequest +from google.cloud.tasks_v2.types.queue import Queue +from google.cloud.tasks_v2.types.queue import RateLimits +from google.cloud.tasks_v2.types.queue import RetryConfig +from google.cloud.tasks_v2.types.queue import StackdriverLoggingConfig +from google.cloud.tasks_v2.types.target import AppEngineHttpRequest +from google.cloud.tasks_v2.types.target import AppEngineRouting +from google.cloud.tasks_v2.types.target import HttpRequest +from google.cloud.tasks_v2.types.target import OAuthToken +from google.cloud.tasks_v2.types.target import OidcToken +from google.cloud.tasks_v2.types.target import HttpMethod +from google.cloud.tasks_v2.types.task import Attempt +from google.cloud.tasks_v2.types.task import Task + +__all__ = ('CloudTasksClient', + 
'CloudTasksAsyncClient', + 'CreateQueueRequest', + 'CreateTaskRequest', + 'DeleteQueueRequest', + 'DeleteTaskRequest', + 'GetQueueRequest', + 'GetTaskRequest', + 'ListQueuesRequest', + 'ListQueuesResponse', + 'ListTasksRequest', + 'ListTasksResponse', + 'PauseQueueRequest', + 'PurgeQueueRequest', + 'ResumeQueueRequest', + 'RunTaskRequest', + 'UpdateQueueRequest', + 'Queue', + 'RateLimits', + 'RetryConfig', + 'StackdriverLoggingConfig', + 'AppEngineHttpRequest', + 'AppEngineRouting', + 'HttpRequest', + 'OAuthToken', + 'OidcToken', + 'HttpMethod', + 'Attempt', + 'Task', +) diff --git a/owl-bot-staging/v2/google/cloud/tasks/py.typed b/owl-bot-staging/v2/google/cloud/tasks/py.typed new file mode 100644 index 00000000..41f0b1b8 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/__init__.py b/owl-bot-staging/v2/google/cloud/tasks_v2/__init__.py new file mode 100644 index 00000000..43e4f607 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/__init__.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.cloud_tasks import CloudTasksClient +from .services.cloud_tasks import CloudTasksAsyncClient + +from .types.cloudtasks import CreateQueueRequest +from .types.cloudtasks import CreateTaskRequest +from .types.cloudtasks import DeleteQueueRequest +from .types.cloudtasks import DeleteTaskRequest +from .types.cloudtasks import GetQueueRequest +from .types.cloudtasks import GetTaskRequest +from .types.cloudtasks import ListQueuesRequest +from .types.cloudtasks import ListQueuesResponse +from .types.cloudtasks import ListTasksRequest +from .types.cloudtasks import ListTasksResponse +from .types.cloudtasks import PauseQueueRequest +from .types.cloudtasks import PurgeQueueRequest +from .types.cloudtasks import ResumeQueueRequest +from .types.cloudtasks import RunTaskRequest +from .types.cloudtasks import UpdateQueueRequest +from .types.queue import Queue +from .types.queue import RateLimits +from .types.queue import RetryConfig +from .types.queue import StackdriverLoggingConfig +from .types.target import AppEngineHttpRequest +from .types.target import AppEngineRouting +from .types.target import HttpRequest +from .types.target import OAuthToken +from .types.target import OidcToken +from .types.target import HttpMethod +from .types.task import Attempt +from .types.task import Task + +__all__ = ( + 'CloudTasksAsyncClient', +'AppEngineHttpRequest', +'AppEngineRouting', +'Attempt', +'CloudTasksClient', +'CreateQueueRequest', +'CreateTaskRequest', +'DeleteQueueRequest', +'DeleteTaskRequest', +'GetQueueRequest', +'GetTaskRequest', +'HttpMethod', +'HttpRequest', +'ListQueuesRequest', +'ListQueuesResponse', +'ListTasksRequest', +'ListTasksResponse', +'OAuthToken', +'OidcToken', +'PauseQueueRequest', +'PurgeQueueRequest', +'Queue', +'RateLimits', +'ResumeQueueRequest', +'RetryConfig', +'RunTaskRequest', +'StackdriverLoggingConfig', +'Task', +'UpdateQueueRequest', +) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/gapic_metadata.json 
b/owl-bot-staging/v2/google/cloud/tasks_v2/gapic_metadata.json new file mode 100644 index 00000000..5cacaba9 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/gapic_metadata.json @@ -0,0 +1,183 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.tasks_v2", + "protoPackage": "google.cloud.tasks.v2", + "schema": "1.0", + "services": { + "CloudTasks": { + "clients": { + "grpc": { + "libraryClient": "CloudTasksClient", + "rpcs": { + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudTasksAsyncClient", + "rpcs": { + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "ListQueues": 
{ + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/py.typed b/owl-bot-staging/v2/google/cloud/tasks_v2/py.typed new file mode 100644 index 00000000..41f0b1b8 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/__init__.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/__init__.py new file mode 100644 index 00000000..1478acb5 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import CloudTasksClient +from .async_client import CloudTasksAsyncClient + +__all__ = ( + 'CloudTasksClient', + 'CloudTasksAsyncClient', +) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/async_client.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/async_client.py new file mode 100644 index 00000000..e55241c7 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/async_client.py @@ -0,0 +1,1801 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2.services.cloud_tasks import pagers +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import target +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport +from .client import CloudTasksClient + + +class CloudTasksAsyncClient: + """Cloud Tasks allows developers to manage the execution of + background work in their applications. 
+ """ + + _client: CloudTasksClient + + DEFAULT_ENDPOINT = CloudTasksClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudTasksClient.DEFAULT_MTLS_ENDPOINT + + queue_path = staticmethod(CloudTasksClient.queue_path) + parse_queue_path = staticmethod(CloudTasksClient.parse_queue_path) + task_path = staticmethod(CloudTasksClient.task_path) + parse_task_path = staticmethod(CloudTasksClient.parse_task_path) + common_billing_account_path = staticmethod(CloudTasksClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(CloudTasksClient.parse_common_billing_account_path) + common_folder_path = staticmethod(CloudTasksClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudTasksClient.parse_common_folder_path) + common_organization_path = staticmethod(CloudTasksClient.common_organization_path) + parse_common_organization_path = staticmethod(CloudTasksClient.parse_common_organization_path) + common_project_path = staticmethod(CloudTasksClient.common_project_path) + parse_common_project_path = staticmethod(CloudTasksClient.parse_common_project_path) + common_location_path = staticmethod(CloudTasksClient.common_location_path) + parse_common_location_path = staticmethod(CloudTasksClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksAsyncClient: The constructed client. + """ + return CloudTasksClient.from_service_account_info.__func__(CloudTasksAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksAsyncClient: The constructed client. + """ + return CloudTasksClient.from_service_account_file.__func__(CloudTasksAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudTasksTransport: + """Returns the transport used by the client instance. + + Returns: + CloudTasksTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(CloudTasksClient).get_transport_class, type(CloudTasksClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, CloudTasksTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud tasks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CloudTasksClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_queues(self, + request: cloudtasks.ListQueuesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesAsyncPager: + r"""Lists queues. + Queues are returned in lexicographical order. + + Args: + request (:class:`google.cloud.tasks_v2.types.ListQueuesRequest`): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.services.cloud_tasks.pagers.ListQueuesAsyncPager: + Response message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.ListQueuesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_queues, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListQueuesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_queue(self, + request: cloudtasks.GetQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + Args: + request (:class:`google.cloud.tasks_v2.types.GetQueueRequest`): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. + name (:class:`str`): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.GetQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_queue, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_queue(self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`google.cloud.tasks_v2.types.CreateQueueRequest`): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. + parent (:class:`str`): + Required. The location name in which the queue will be + created. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (:class:`google.cloud.tasks_v2.types.Queue`): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2.Queue.name] cannot + be the same as an existing queue. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.CreateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_queue(self, + request: cloudtasks.UpdateQueueRequest = None, + *, + queue: gct_queue.Queue = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`google.cloud.tasks_v2.types.UpdateQueueRequest`): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. + queue (:class:`google.cloud.tasks_v2.types.Queue`): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2.Queue.name] + must be specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2.Queue.name] cannot be + changed. 
+ + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.UpdateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("queue.name", request.queue.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_queue(self, + request: cloudtasks.DeleteQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`google.cloud.tasks_v2.types.DeleteQueueRequest`): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.DeleteQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_queue, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def purge_queue(self, + request: cloudtasks.PurgeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Args: + request (:class:`google.cloud.tasks_v2.types.PurgeQueueRequest`): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. + name (:class:`str`): + Required. The queue name. 
For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.PurgeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.purge_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def pause_queue(self, + request: cloudtasks.PauseQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2.Queue.state] is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + Args: + request (:class:`google.cloud.tasks_v2.types.PauseQueueRequest`): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.PauseQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def resume_queue(self, + request: cloudtasks.ResumeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2.Queue.state]; after calling this + method it will be set to + [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Args: + request (:class:`google.cloud.tasks_v2.types.ResumeQueueRequest`): + The request object. 
Request message for + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.ResumeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy(self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if + the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). 
+ A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy(self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. 
See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions(self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2.Queue]. If the resource does not + exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`Sequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. 
+ + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_tasks(self, + request: cloudtasks.ListTasksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksAsyncPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + due to performance considerations; + [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Args: + request (:class:`google.cloud.tasks_v2.types.ListTasksRequest`): + The request object. Request message for listing tasks + using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.services.cloud_tasks.pagers.ListTasksAsyncPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tasks, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTasksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_task(self, + request: cloudtasks.GetTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + Args: + request (:class:`google.cloud.tasks_v2.types.GetTaskRequest`): + The request object. Request message for getting a task + using + [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. + name (:class:`str`): + Required. The task name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_task, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_task(self, + request: cloudtasks.CreateTaskRequest = None, + *, + parent: str = None, + task: gct_task.Task = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Args: + request (:class:`google.cloud.tasks_v2.types.CreateTaskRequest`): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`google.cloud.tasks_v2.types.Task`): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2.Task.name]. If a name is + not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or executed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or executed. 
If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + executed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_task, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_task(self, + request: cloudtasks.DeleteTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Args: + request (:class:`google.cloud.tasks_v2.types.DeleteTaskRequest`): + The request object. Request message for deleting a task + using + [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_task, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def run_task(self, + request: cloudtasks.RunTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can + be used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. 
That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will + be reset to the time that + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called + plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Args: + request (:class:`google.cloud.tasks_v2.types.RunTaskRequest`): + The request object. Request message for forcing a task + to run now using + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_task, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-tasks", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "CloudTasksAsyncClient", +) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/client.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/client.py new file mode 100644 index 00000000..1209d0ef --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/client.py @@ -0,0 +1,1940 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2.services.cloud_tasks import pagers +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import target +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import CloudTasksGrpcTransport +from 
.transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +class CloudTasksClientMeta(type): + """Metaclass for the CloudTasks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] + _transport_registry["grpc"] = CloudTasksGrpcTransport + _transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[CloudTasksTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudTasksClient(metaclass=CloudTasksClientMeta): + """Cloud Tasks allows developers to manage the execution of + background work in their applications. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudtasks.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudTasksTransport: + """Returns the transport used by the client instance. + + Returns: + CloudTasksTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def queue_path(project: str,location: str,queue: str,) -> str: + """Returns a fully-qualified queue string.""" + return "projects/{project}/locations/{location}/queues/{queue}".format(project=project, location=location, queue=queue, ) + + @staticmethod + def parse_queue_path(path: str) -> Dict[str,str]: + """Parses a queue path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/queues/(?P<queue>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def task_path(project: str,location: str,queue: str,task: str,) -> str: + """Returns a fully-qualified task string.""" + return "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(project=project, location=location, queue=queue, task=task, ) + + @staticmethod + def parse_task_path(path: str) -> Dict[str,str]: + """Parses a task path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/queues/(?P<queue>.+?)/tasks/(?P<task>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + 
"""Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, CloudTasksTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud tasks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. 
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudTasksTransport): + # transport is a CloudTasksTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_queues(self, + request: cloudtasks.ListQueuesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesPager: + r"""Lists queues. + Queues are returned in lexicographical order. + + Args: + request (google.cloud.tasks_v2.types.ListQueuesRequest): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.services.cloud_tasks.pagers.ListQueuesPager: + Response message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListQueuesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListQueuesRequest): + request = cloudtasks.ListQueuesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_queues] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQueuesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_queue(self, + request: cloudtasks.GetQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + Args: + request (google.cloud.tasks_v2.types.GetQueueRequest): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. + name (str): + Required. The resource name of the queue. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetQueueRequest): + request = cloudtasks.GetQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_queue(self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (google.cloud.tasks_v2.types.CreateQueueRequest): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. + parent (str): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (google.cloud.tasks_v2.types.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2.Queue.name] cannot + be the same as an existing queue. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateQueueRequest): + request = cloudtasks.CreateQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_queue(self, + request: cloudtasks.UpdateQueueRequest = None, + *, + queue: gct_queue.Queue = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (google.cloud.tasks_v2.types.UpdateQueueRequest): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. + queue (google.cloud.tasks_v2.types.Queue): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2.Queue.name] + must be specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2.Queue.name] cannot be + changed. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.UpdateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.UpdateQueueRequest): + request = cloudtasks.UpdateQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("queue.name", request.queue.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_queue(self, + request: cloudtasks.DeleteQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (google.cloud.tasks_v2.types.DeleteQueueRequest): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloudtasks.DeleteQueueRequest): + request = cloudtasks.DeleteQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def purge_queue(self, + request: cloudtasks.PurgeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Args: + request (google.cloud.tasks_v2.types.PurgeQueueRequest): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. 
Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PurgeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PurgeQueueRequest): + request = cloudtasks.PurgeQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def pause_queue(self, + request: cloudtasks.PauseQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. 
+ Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2.Queue.state] is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + Args: + request (google.cloud.tasks_v2.types.PauseQueueRequest): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PauseQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PauseQueueRequest): + request = cloudtasks.PauseQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def resume_queue(self, + request: cloudtasks.ResumeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2.Queue.state]; after calling this + method it will be set to + [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Args: + request (google.cloud.tasks_v2.types.ResumeQueueRequest): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ResumeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ResumeQueueRequest): + request = cloudtasks.ResumeQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_iam_policy(self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if + the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Args: + request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for `GetIamPolicy` + method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. 
+ request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Args: + request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for `SetIamPolicy` + method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2.Queue]. If the resource does not + exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. 
+ + Args: + request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for + `TestIamPermissions` method. + resource (str): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (Sequence[str]): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + if resource is not None: + request.resource = resource + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_tasks(self, + request: cloudtasks.ListTasksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + due to performance considerations; + [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Args: + request (google.cloud.tasks_v2.types.ListTasksRequest): + The request object. Request message for listing tasks + using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.services.cloud_tasks.pagers.ListTasksPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListTasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListTasksRequest): + request = cloudtasks.ListTasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTasksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_task(self, + request: cloudtasks.GetTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + Args: + request (google.cloud.tasks_v2.types.GetTaskRequest): + The request object. Request message for getting a task + using + [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetTaskRequest): + request = cloudtasks.GetTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_task(self, + request: cloudtasks.CreateTaskRequest = None, + *, + parent: str = None, + task: gct_task.Task = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Args: + request (google.cloud.tasks_v2.types.CreateTaskRequest): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (google.cloud.tasks_v2.types.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2.Task.name]. If a name is + not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. 
+ + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or executed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or executed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + executed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateTaskRequest): + request = cloudtasks.CreateTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_task(self, + request: cloudtasks.DeleteTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Args: + request (google.cloud.tasks_v2.types.DeleteTaskRequest): + The request object. Request message for deleting a task + using + [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. + name (str): + Required. The task name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteTaskRequest): + request = cloudtasks.DeleteTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def run_task(self, + request: cloudtasks.RunTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can + be used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will + be reset to the time that + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called + plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Args: + request (google.cloud.tasks_v2.types.RunTaskRequest): + The request object. Request message for forcing a task + to run now using + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. + name (str): + Required. The task name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.RunTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.RunTaskRequest): + request = cloudtasks.RunTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-tasks", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "CloudTasksClient", +) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/pagers.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/pagers.py new file mode 100644 index 00000000..b8bff650 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/pagers.py @@ -0,0 +1,264 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import task + + +class ListQueuesPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2.types.ListQueuesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.tasks_v2.types.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., cloudtasks.ListQueuesResponse], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2.types.ListQueuesRequest): + The initial request object. + response (google.cloud.tasks_v2.types.ListQueuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[queue.Queue]: + for page in self.pages: + yield from page.queues + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListQueuesAsyncPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2.types.ListQueuesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``queues`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2.types.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cloudtasks.ListQueuesResponse]], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2.types.ListQueuesRequest): + The initial request object. + response (google.cloud.tasks_v2.types.ListQueuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[queue.Queue]: + async def async_generator(): + async for page in self.pages: + for response in page.queues: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTasksPager: + """A pager for iterating through ``list_tasks`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.tasks_v2.types.ListTasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2.types.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., cloudtasks.ListTasksResponse], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2.types.ListTasksRequest): + The initial request object. + response (google.cloud.tasks_v2.types.ListTasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[task.Task]: + for page in self.pages: + yield from page.tasks + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTasksAsyncPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2.types.ListTasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2.types.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cloudtasks.ListTasksResponse]], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2.types.ListTasksRequest): + The initial request object. + response (google.cloud.tasks_v2.types.ListTasksResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[task.Task]: + async def async_generator(): + async for page in self.pages: + for response in page.tasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py new file mode 100644 index 00000000..3db96829 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudTasksTransport +from .grpc import CloudTasksGrpcTransport +from .grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] +_transport_registry['grpc'] = CloudTasksGrpcTransport +_transport_registry['grpc_asyncio'] = CloudTasksGrpcAsyncIOTransport + +__all__ = ( + 'CloudTasksTransport', + 'CloudTasksGrpcTransport', + 'CloudTasksGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py new file mode 100644 index 00000000..24108ca8 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py @@ -0,0 +1,441 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-tasks', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class CloudTasksTransport(abc.ABC): + """Abstract transport class for CloudTasks.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'cloudtasks.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: 
Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.list_queues: gapic_v1.method.wrap_method( + self.list_queues, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + ), + self.get_queue: gapic_v1.method.wrap_method( + self.get_queue, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + ), + self.create_queue: gapic_v1.method.wrap_method( + self.create_queue, + default_timeout=10.0, + client_info=client_info, + ), + self.update_queue: gapic_v1.method.wrap_method( + self.update_queue, + default_timeout=10.0, + client_info=client_info, + ), + self.delete_queue: gapic_v1.method.wrap_method( + self.delete_queue, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + ), + self.purge_queue: gapic_v1.method.wrap_method( + self.purge_queue, + default_timeout=10.0, + client_info=client_info, + ), + self.pause_queue: gapic_v1.method.wrap_method( + self.pause_queue, + default_timeout=10.0, + client_info=client_info, + ), + self.resume_queue: gapic_v1.method.wrap_method( + self.resume_queue, + default_timeout=10.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + 
client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=10.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + ), + self.list_tasks: gapic_v1.method.wrap_method( + self.list_tasks, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + ), + self.get_task: gapic_v1.method.wrap_method( + self.get_task, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + ), + self.create_task: gapic_v1.method.wrap_method( + self.create_task, + default_timeout=10.0, + client_info=client_info, + ), + self.delete_task: gapic_v1.method.wrap_method( + self.delete_task, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + ), + self.run_task: gapic_v1.method.wrap_method( + self.run_task, + default_timeout=10.0, + client_info=client_info, + ), + } + + @property + def list_queues(self) -> Callable[ + [cloudtasks.ListQueuesRequest], + Union[ + cloudtasks.ListQueuesResponse, + Awaitable[cloudtasks.ListQueuesResponse] + ]]: + raise NotImplementedError() + + @property + def 
get_queue(self) -> Callable[ + [cloudtasks.GetQueueRequest], + Union[ + queue.Queue, + Awaitable[queue.Queue] + ]]: + raise NotImplementedError() + + @property + def create_queue(self) -> Callable[ + [cloudtasks.CreateQueueRequest], + Union[ + gct_queue.Queue, + Awaitable[gct_queue.Queue] + ]]: + raise NotImplementedError() + + @property + def update_queue(self) -> Callable[ + [cloudtasks.UpdateQueueRequest], + Union[ + gct_queue.Queue, + Awaitable[gct_queue.Queue] + ]]: + raise NotImplementedError() + + @property + def delete_queue(self) -> Callable[ + [cloudtasks.DeleteQueueRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def purge_queue(self) -> Callable[ + [cloudtasks.PurgeQueueRequest], + Union[ + queue.Queue, + Awaitable[queue.Queue] + ]]: + raise NotImplementedError() + + @property + def pause_queue(self) -> Callable[ + [cloudtasks.PauseQueueRequest], + Union[ + queue.Queue, + Awaitable[queue.Queue] + ]]: + raise NotImplementedError() + + @property + def resume_queue(self) -> Callable[ + [cloudtasks.ResumeQueueRequest], + Union[ + queue.Queue, + Awaitable[queue.Queue] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def list_tasks(self) -> Callable[ + [cloudtasks.ListTasksRequest], + Union[ + cloudtasks.ListTasksResponse, + Awaitable[cloudtasks.ListTasksResponse] + 
]]: + raise NotImplementedError() + + @property + def get_task(self) -> Callable[ + [cloudtasks.GetTaskRequest], + Union[ + task.Task, + Awaitable[task.Task] + ]]: + raise NotImplementedError() + + @property + def create_task(self) -> Callable[ + [cloudtasks.CreateTaskRequest], + Union[ + gct_task.Task, + Awaitable[gct_task.Task] + ]]: + raise NotImplementedError() + + @property + def delete_task(self) -> Callable[ + [cloudtasks.DeleteTaskRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def run_task(self) -> Callable[ + [cloudtasks.RunTaskRequest], + Union[ + task.Task, + Awaitable[task.Task] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'CloudTasksTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py new file mode 100644 index 00000000..bc22a2f2 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py @@ -0,0 +1,779 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO + + +class CloudTasksGrpcTransport(CloudTasksTransport): + """gRPC backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'cloudtasks.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'cloudtasks.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def list_queues(self) -> Callable[ + [cloudtasks.ListQueuesRequest], + cloudtasks.ListQueuesResponse]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + ~.ListQueuesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_queues' not in self._stubs: + self._stubs['list_queues'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/ListQueues', + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs['list_queues'] + + @property + def get_queue(self) -> Callable[ + [cloudtasks.GetQueueRequest], + queue.Queue]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_queue' not in self._stubs: + self._stubs['get_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/GetQueue', + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['get_queue'] + + @property + def create_queue(self) -> Callable[ + [cloudtasks.CreateQueueRequest], + gct_queue.Queue]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_queue' not in self._stubs: + self._stubs['create_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/CreateQueue', + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs['create_queue'] + + @property + def update_queue(self) -> Callable[ + [cloudtasks.UpdateQueueRequest], + gct_queue.Queue]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_queue' not in self._stubs: + self._stubs['update_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/UpdateQueue', + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs['update_queue'] + + @property + def delete_queue(self) -> Callable[ + [cloudtasks.DeleteQueueRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. 
+ + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_queue' not in self._stubs: + self._stubs['delete_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/DeleteQueue', + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_queue'] + + @property + def purge_queue(self) -> Callable[ + [cloudtasks.PurgeQueueRequest], + queue.Queue]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'purge_queue' not in self._stubs: + self._stubs['purge_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/PurgeQueue', + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['purge_queue'] + + @property + def pause_queue(self) -> Callable[ + [cloudtasks.PauseQueueRequest], + queue.Queue]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2.Queue.state] is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'pause_queue' not in self._stubs: + self._stubs['pause_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/PauseQueue', + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['pause_queue'] + + @property + def resume_queue(self) -> Callable[ + [cloudtasks.ResumeQueueRequest], + queue.Queue]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. 
The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2.Queue.state]; after calling this + method it will be set to + [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'resume_queue' not in self._stubs: + self._stubs['resume_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/ResumeQueue', + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['resume_queue'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if + the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2.Queue]. 
If the resource does not + exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def list_tasks(self) -> Callable[ + [cloudtasks.ListTasksRequest], + cloudtasks.ListTasksResponse]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + due to performance considerations; + [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + ~.ListTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_tasks' not in self._stubs: + self._stubs['list_tasks'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/ListTasks', + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs['list_tasks'] + + @property + def get_task(self) -> Callable[ + [cloudtasks.GetTaskRequest], + task.Task]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_task' not in self._stubs: + self._stubs['get_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/GetTask', + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['get_task'] + + @property + def create_task(self) -> Callable[ + [cloudtasks.CreateTaskRequest], + gct_task.Task]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Returns: + Callable[[~.CreateTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_task' not in self._stubs: + self._stubs['create_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/CreateTask', + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs['create_task'] + + @property + def delete_task(self) -> Callable[ + [cloudtasks.DeleteTaskRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_task' not in self._stubs: + self._stubs['delete_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/DeleteTask', + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_task'] + + @property + def run_task(self) -> Callable[ + [cloudtasks.RunTaskRequest], + task.Task]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can + be used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. 
That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will + be reset to the time that + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called + plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Returns: + Callable[[~.RunTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_task' not in self._stubs: + self._stubs['run_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/RunTask', + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['run_task'] + + +__all__ = ( + 'CloudTasksGrpcTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py new file mode 100644 index 00000000..c7ee527a --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py @@ -0,0 +1,783 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .grpc import CloudTasksGrpcTransport + + +class CloudTasksGrpcAsyncIOTransport(CloudTasksTransport): + """gRPC AsyncIO backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'cloudtasks.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'cloudtasks.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_queues(self) -> Callable[ + [cloudtasks.ListQueuesRequest], + Awaitable[cloudtasks.ListQueuesResponse]]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + Awaitable[~.ListQueuesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_queues' not in self._stubs: + self._stubs['list_queues'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/ListQueues', + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs['list_queues'] + + @property + def get_queue(self) -> Callable[ + [cloudtasks.GetQueueRequest], + Awaitable[queue.Queue]]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_queue' not in self._stubs: + self._stubs['get_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/GetQueue', + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['get_queue'] + + @property + def create_queue(self) -> Callable[ + [cloudtasks.CreateQueueRequest], + Awaitable[gct_queue.Queue]]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. 
+ + Returns: + Callable[[~.CreateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_queue' not in self._stubs: + self._stubs['create_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/CreateQueue', + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs['create_queue'] + + @property + def update_queue(self) -> Callable[ + [cloudtasks.UpdateQueueRequest], + Awaitable[gct_queue.Queue]]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_queue' not in self._stubs: + self._stubs['update_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/UpdateQueue', + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs['update_queue'] + + @property + def delete_queue(self) -> Callable[ + [cloudtasks.DeleteQueueRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_queue' not in self._stubs: + self._stubs['delete_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/DeleteQueue', + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_queue'] + + @property + def purge_queue(self) -> Callable[ + [cloudtasks.PurgeQueueRequest], + Awaitable[queue.Queue]]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. 
A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'purge_queue' not in self._stubs: + self._stubs['purge_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/PurgeQueue', + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['purge_queue'] + + @property + def pause_queue(self) -> Callable[ + [cloudtasks.PauseQueueRequest], + Awaitable[queue.Queue]]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2.Queue.state] is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'pause_queue' not in self._stubs: + self._stubs['pause_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/PauseQueue', + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['pause_queue'] + + @property + def resume_queue(self) -> Callable[ + [cloudtasks.ResumeQueueRequest], + Awaitable[queue.Queue]]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2.Queue.state]; after calling this + method it will be set to + [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'resume_queue' not in self._stubs: + self._stubs['resume_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/ResumeQueue', + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['resume_queue'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. 
+ + Gets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if + the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2.Queue]. If the resource does not + exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def list_tasks(self) -> Callable[ + [cloudtasks.ListTasksRequest], + Awaitable[cloudtasks.ListTasksResponse]]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. 
+ + By default, only the + [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + due to performance considerations; + [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + Awaitable[~.ListTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_tasks' not in self._stubs: + self._stubs['list_tasks'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/ListTasks', + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs['list_tasks'] + + @property + def get_task(self) -> Callable[ + [cloudtasks.GetTaskRequest], + Awaitable[task.Task]]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_task' not in self._stubs: + self._stubs['get_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/GetTask', + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['get_task'] + + @property + def create_task(self) -> Callable[ + [cloudtasks.CreateTaskRequest], + Awaitable[gct_task.Task]]: + r"""Return a callable for the create task method over gRPC. 
+ + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Returns: + Callable[[~.CreateTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_task' not in self._stubs: + self._stubs['create_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/CreateTask', + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs['create_task'] + + @property + def delete_task(self) -> Callable[ + [cloudtasks.DeleteTaskRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_task' not in self._stubs: + self._stubs['delete_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/DeleteTask', + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_task'] + + @property + def run_task(self) -> Callable[ + [cloudtasks.RunTaskRequest], + Awaitable[task.Task]]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. 
+ + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can + be used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will + be reset to the time that + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called + plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Returns: + Callable[[~.RunTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'run_task' not in self._stubs: + self._stubs['run_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2.CloudTasks/RunTask', + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['run_task'] + + +__all__ = ( + 'CloudTasksGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/tasks_v2/types/__init__.py new file mode 100644 index 00000000..5c3ae8fb --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/types/__init__.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .cloudtasks import ( + CreateQueueRequest, + CreateTaskRequest, + DeleteQueueRequest, + DeleteTaskRequest, + GetQueueRequest, + GetTaskRequest, + ListQueuesRequest, + ListQueuesResponse, + ListTasksRequest, + ListTasksResponse, + PauseQueueRequest, + PurgeQueueRequest, + ResumeQueueRequest, + RunTaskRequest, + UpdateQueueRequest, +) +from .queue import ( + Queue, + RateLimits, + RetryConfig, + StackdriverLoggingConfig, +) +from .target import ( + AppEngineHttpRequest, + AppEngineRouting, + HttpRequest, + OAuthToken, + OidcToken, + HttpMethod, +) +from .task import ( + Attempt, + Task, +) + +__all__ = ( + 'CreateQueueRequest', + 'CreateTaskRequest', + 'DeleteQueueRequest', + 'DeleteTaskRequest', + 'GetQueueRequest', + 'GetTaskRequest', + 'ListQueuesRequest', + 'ListQueuesResponse', + 'ListTasksRequest', + 'ListTasksResponse', + 'PauseQueueRequest', + 'PurgeQueueRequest', + 'ResumeQueueRequest', + 'RunTaskRequest', + 'UpdateQueueRequest', + 'Queue', + 'RateLimits', + 'RetryConfig', + 'StackdriverLoggingConfig', + 'AppEngineHttpRequest', + 'AppEngineRouting', + 'HttpRequest', + 'OAuthToken', + 'OidcToken', + 'HttpMethod', + 'Attempt', + 'Task', +) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/types/cloudtasks.py b/owl-bot-staging/v2/google/cloud/tasks_v2/types/cloudtasks.py new file mode 100644 index 00000000..953edc65 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/types/cloudtasks.py @@ -0,0 +1,558 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task as gct_task +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.tasks.v2', + manifest={ + 'ListQueuesRequest', + 'ListQueuesResponse', + 'GetQueueRequest', + 'CreateQueueRequest', + 'UpdateQueueRequest', + 'DeleteQueueRequest', + 'PurgeQueueRequest', + 'PauseQueueRequest', + 'ResumeQueueRequest', + 'ListTasksRequest', + 'ListTasksResponse', + 'GetTaskRequest', + 'CreateTaskRequest', + 'DeleteTaskRequest', + 'RunTaskRequest', + }, +) + + +class ListQueuesRequest(proto.Message): + r"""Request message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + + Attributes: + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + filter (str): + ``filter`` can be used to specify a subset of queues. Any + [Queue][google.cloud.tasks.v2.Queue] field can be used as a + filter and several operators as supported. For example: + ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as + described in `Stackdriver's Advanced Logs + Filters `__. + + Sample filter "state: PAUSED". + + Note that using filters might cause fewer queues than the + requested page_size to be returned. + page_size (int): + Requested page size. + + The maximum page size is 9800. If unspecified, the page size + will be the maximum. Fewer queues than requested might be + returned, even if more queues exist; use the + [next_page_token][google.cloud.tasks.v2.ListQueuesResponse.next_page_token] + in the response to determine if more queues exist. + page_token (str): + A token identifying the page of results to return. + + To request the first page results, page_token must be empty. 
+ To request the next page of results, page_token must be the + value of + [next_page_token][google.cloud.tasks.v2.ListQueuesResponse.next_page_token] + returned from the previous call to + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] + method. It is an error to switch the value of the + [filter][google.cloud.tasks.v2.ListQueuesRequest.filter] + while iterating through pages. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + + +class ListQueuesResponse(proto.Message): + r"""Response message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + + Attributes: + queues (Sequence[google.cloud.tasks_v2.types.Queue]): + The list of queues. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] + with this value as the + [page_token][google.cloud.tasks.v2.ListQueuesRequest.page_token]. + + If the next_page_token is empty, there are no more results. + + The page token is valid for only 2 hours. + """ + + @property + def raw_page(self): + return self + + queues = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gct_queue.Queue, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetQueueRequest(proto.Message): + r"""Request message for + [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. + + Attributes: + name (str): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateQueueRequest(proto.Message): + r"""Request message for + [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. + + Attributes: + parent (str): + Required. 
The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + queue (google.cloud.tasks_v2.types.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2.Queue.name] cannot be + the same as an existing queue. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + queue = proto.Field( + proto.MESSAGE, + number=2, + message=gct_queue.Queue, + ) + + +class UpdateQueueRequest(proto.Message): + r"""Request message for + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. + + Attributes: + queue (google.cloud.tasks_v2.types.Queue): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. Any + value specified for an output only field will be ignored. + The queue's [name][google.cloud.tasks.v2.Queue.name] cannot + be changed. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields of the + queue are being updated. + If empty, then all fields will be updated. + """ + + queue = proto.Field( + proto.MESSAGE, + number=1, + message=gct_queue.Queue, + ) + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteQueueRequest(proto.Message): + r"""Request message for + [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class PurgeQueueRequest(proto.Message): + r"""Request message for + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. + + Attributes: + name (str): + Required. 
The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class PauseQueueRequest(proto.Message): + r"""Request message for + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ResumeQueueRequest(proto.Message): + r"""Request message for + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTasksRequest(proto.Message): + r"""Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + response_view (google.cloud.tasks_v2.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2.Task] resource. + page_size (int): + Maximum page size. 
+ + Fewer tasks than requested might be returned, even if more + tasks exist; use + [next_page_token][google.cloud.tasks.v2.ListTasksResponse.next_page_token] + in the response to determine if more tasks exist. + + The maximum page size is 1000. If unspecified, the page size + will be the maximum. + page_token (str): + A token identifying the page of results to return. + + To request the first page results, page_token must be empty. + To request the next page of results, page_token must be the + value of + [next_page_token][google.cloud.tasks.v2.ListTasksResponse.next_page_token] + returned from the previous call to + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] + method. + + The page token is valid for only 2 hours. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + response_view = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + + +class ListTasksResponse(proto.Message): + r"""Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + + Attributes: + tasks (Sequence[google.cloud.tasks_v2.types.Task]): + The list of tasks. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] with + this value as the + [page_token][google.cloud.tasks.v2.ListTasksRequest.page_token]. + + If the next_page_token is empty, there are no more results. + """ + + @property + def raw_page(self): + return self + + tasks = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gct_task.Task, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTaskRequest(proto.Message): + r"""Request message for getting a task using + [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. + + Attributes: + name (str): + Required. The task name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (google.cloud.tasks_v2.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2.Task] resource. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + response_view = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + + +class CreateTaskRequest(proto.Message): + r"""Request message for + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + task (google.cloud.tasks_v2.types.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2.Task.name]. If a name is not + specified then the system will generate a random unique task + id, which will be set in the task returned in the + [response][google.cloud.tasks.v2.Task.name]. + + If [schedule_time][google.cloud.tasks.v2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set it to + the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task de-duplication. 
+ If a task's ID is identical to that of an existing task or a + task that was deleted or executed recently then the call + will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour after + the original task was deleted or executed. If the task's + queue was created using queue.yaml or queue.xml, then + another task with the same name can't be created for ~9days + after the original task was deleted or executed. + + Because there is an extra lookup cost to identify duplicate + task names, these + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id is + recommended. Choosing task ids that are sequential or have + sequential prefixes, for example using a timestamp, causes + an increase in latency and error rates in all task commands. + The infrastructure relies on an approximately uniform + distribution of task ids to store and serve tasks + efficiently. + response_view (google.cloud.tasks_v2.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2.Task] resource. 
+ """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + task = proto.Field( + proto.MESSAGE, + number=2, + message=gct_task.Task, + ) + response_view = proto.Field( + proto.ENUM, + number=3, + enum=gct_task.Task.View, + ) + + +class DeleteTaskRequest(proto.Message): + r"""Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class RunTaskRequest(proto.Message): + r"""Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (google.cloud.tasks_v2.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2.Task] resource. 
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + response_view = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/types/queue.py b/owl-bot-staging/v2/google/cloud/tasks_v2/types/queue.py new file mode 100644 index 00000000..6bba9e15 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/types/queue.py @@ -0,0 +1,434 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.tasks_v2.types import target +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.tasks.v2', + manifest={ + 'Queue', + 'RateLimits', + 'RetryConfig', + 'StackdriverLoggingConfig', + }, +) + + +class Queue(proto.Message): + r"""A queue is a container of related tasks. Queues are + configured to manage how those tasks are dispatched. + Configurable properties include rate limits, retry options, + queue types, and others. + + Attributes: + name (str): + Caller-specified and required in + [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue], + after which it becomes output only. + + The queue name. 
+ + The queue name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the queue's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + app_engine_routing_override (google.cloud.tasks_v2.types.AppEngineRouting): + Overrides for [task-level + app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. + These settings apply only to [App Engine + tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in this + queue. [Http tasks][google.cloud.tasks.v2.HttpRequest] are + not affected. + + If set, ``app_engine_routing_override`` is used for all [App + Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in + the queue, no matter what the setting is for the [task-level + app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. + rate_limits (google.cloud.tasks_v2.types.RateLimits): + Rate limits for task dispatches. + + [rate_limits][google.cloud.tasks.v2.Queue.rate_limits] and + [retry_config][google.cloud.tasks.v2.Queue.retry_config] are + related because they both control task attempts. However + they control task attempts in different ways: + + - [rate_limits][google.cloud.tasks.v2.Queue.rate_limits] + controls the total rate of dispatches from a queue (i.e. + all traffic dispatched from the queue, regardless of + whether the dispatch is from a first attempt or a retry). 
+ - [retry_config][google.cloud.tasks.v2.Queue.retry_config] + controls what happens to particular a task after its + first attempt fails. That is, + [retry_config][google.cloud.tasks.v2.Queue.retry_config] + controls task retries (the second attempt, third attempt, + etc). + + The queue's actual dispatch rate is the result of: + + - Number of tasks in the queue + - User-specified throttling: + [rate_limits][google.cloud.tasks.v2.Queue.rate_limits], + [retry_config][google.cloud.tasks.v2.Queue.retry_config], + and the [queue's + state][google.cloud.tasks.v2.Queue.state]. + - System throttling due to ``429`` (Too Many Requests) or + ``503`` (Service Unavailable) responses from the worker, + high error rates, or to smooth sudden large traffic + spikes. + retry_config (google.cloud.tasks_v2.types.RetryConfig): + Settings that determine the retry behavior. + + - For tasks created using Cloud Tasks: the queue-level + retry settings apply to all tasks in the queue that were + created using Cloud Tasks. Retry settings cannot be set + on individual tasks. + - For tasks created using the App Engine SDK: the + queue-level retry settings apply to all tasks in the + queue which do not have retry settings explicitly set on + the task and were created by the App Engine SDK. See `App + Engine + documentation `__. + state (google.cloud.tasks_v2.types.Queue.State): + Output only. The state of the queue. + + ``state`` can only be changed by called + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue], + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue], + or uploading + `queue.yaml/xml `__. + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] + cannot be used to change ``state``. + purge_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last time this queue was purged. + + All tasks that were + [created][google.cloud.tasks.v2.Task.create_time] before + this time were purged. 
+ + A queue can be purged using + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue], + the `App Engine Task Queue SDK, or the Cloud + Console `__. + + Purge time will be truncated to the nearest microsecond. + Purge time will be unset if the queue has never been purged. + stackdriver_logging_config (google.cloud.tasks_v2.types.StackdriverLoggingConfig): + Configuration options for writing logs to `Stackdriver + Logging `__. If this + field is unset, then no logs are written. + """ + class State(proto.Enum): + r"""State of the queue.""" + STATE_UNSPECIFIED = 0 + RUNNING = 1 + PAUSED = 2 + DISABLED = 3 + + name = proto.Field( + proto.STRING, + number=1, + ) + app_engine_routing_override = proto.Field( + proto.MESSAGE, + number=2, + message=target.AppEngineRouting, + ) + rate_limits = proto.Field( + proto.MESSAGE, + number=3, + message='RateLimits', + ) + retry_config = proto.Field( + proto.MESSAGE, + number=4, + message='RetryConfig', + ) + state = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + purge_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + stackdriver_logging_config = proto.Field( + proto.MESSAGE, + number=9, + message='StackdriverLoggingConfig', + ) + + +class RateLimits(proto.Message): + r"""Rate limits. + + This message determines the maximum rate that tasks can be + dispatched by a queue, regardless of whether the dispatch is a first + task attempt or a retry. + + Note: The debugging command, + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask], will run a task + even if the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits]. + + Attributes: + max_dispatches_per_second (float): + The maximum rate at which tasks are dispatched from this + queue. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + - The maximum allowed value is 500. + + This field has the same meaning as `rate in + queue.yaml/xml `__. + max_burst_size (int): + Output only. 
The max burst size. + + Max burst size limits how fast tasks in queue are processed + when many tasks are in the queue and the rate is high. This + field allows the queue to have a high rate so processing + starts shortly after a task is enqueued, but still limits + resource usage when many tasks are enqueued in a short + period of time. + + The `token + bucket `__ + algorithm is used to control the rate of task dispatches. + Each queue has a token bucket that holds tokens, up to the + maximum specified by ``max_burst_size``. Each time a task is + dispatched, a token is removed from the bucket. Tasks will + be dispatched until the queue's bucket runs out of tokens. + The bucket will be continuously refilled with new tokens + based on + [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second]. + + Cloud Tasks will pick the value of ``max_burst_size`` based + on the value of + [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second]. + + For queues that were created or updated using + ``queue.yaml/xml``, ``max_burst_size`` is equal to + `bucket_size `__. + Since ``max_burst_size`` is output only, if + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] + is called on a queue created by ``queue.yaml/xml``, + ``max_burst_size`` will be reset based on the value of + [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second], + regardless of whether + [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second] + is updated. + max_concurrent_dispatches (int): + The maximum number of concurrent tasks that Cloud Tasks + allows to be dispatched for this queue. After this threshold + has been reached, Cloud Tasks stops dispatching tasks until + the number of concurrent requests decreases. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + The maximum allowed value is 5,000. 
+ + This field has the same meaning as `max_concurrent_requests + in + queue.yaml/xml `__. + """ + + max_dispatches_per_second = proto.Field( + proto.DOUBLE, + number=1, + ) + max_burst_size = proto.Field( + proto.INT32, + number=2, + ) + max_concurrent_dispatches = proto.Field( + proto.INT32, + number=3, + ) + + +class RetryConfig(proto.Message): + r"""Retry config. + These settings determine when a failed task attempt is retried. + + Attributes: + max_attempts (int): + Number of attempts per task. + + Cloud Tasks will attempt the task ``max_attempts`` times + (that is, if the first attempt fails, then there will be + ``max_attempts - 1`` retries). Must be >= -1. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + -1 indicates unlimited attempts. + + This field has the same meaning as `task_retry_limit in + queue.yaml/xml `__. + max_retry_duration (google.protobuf.duration_pb2.Duration): + If positive, ``max_retry_duration`` specifies the time limit + for retrying a failed task, measured from when the task was + first attempted. Once ``max_retry_duration`` time has passed + *and* the task has been attempted + [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] + times, no further attempts will be made and the task will be + deleted. + + If zero, then the task age is unlimited. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``max_retry_duration`` will be truncated to the nearest + second. + + This field has the same meaning as `task_age_limit in + queue.yaml/xml `__. + min_backoff (google.protobuf.duration_pb2.Duration): + A task will be + [scheduled][google.cloud.tasks.v2.Task.schedule_time] for + retry between + [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies + that the task should be retried. 
+ + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``min_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `min_backoff_seconds in + queue.yaml/xml `__. + max_backoff (google.protobuf.duration_pb2.Duration): + A task will be + [scheduled][google.cloud.tasks.v2.Task.schedule_time] for + retry between + [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies + that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``max_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `max_backoff_seconds in + queue.yaml/xml `__. + max_doublings (int): + The time between retries will double ``max_doublings`` + times. + + A task's retry interval starts at + [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff], + then doubles ``max_doublings`` times, then increases + linearly, and finally retries retries at intervals of + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + up to + [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] + times. + + For example, if + [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] + is 10s, + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + is 300s, and ``max_doublings`` is 3, then the a task will + first be retried in 10s. The retry interval will double + three times, and then increase linearly by 2^3 \* 10s. + Finally, the task will retry at intervals of + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + until the task has been attempted + [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] + times. Thus, the requests will retry at 10s, 20s, 40s, 80s, + 160s, 240s, 300s, 300s, .... 
+ + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + This field has the same meaning as `max_doublings in + queue.yaml/xml `__. + """ + + max_attempts = proto.Field( + proto.INT32, + number=1, + ) + max_retry_duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + min_backoff = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + max_backoff = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + max_doublings = proto.Field( + proto.INT32, + number=5, + ) + + +class StackdriverLoggingConfig(proto.Message): + r"""Configuration options for writing logs to `Stackdriver + Logging `__. + + Attributes: + sampling_ratio (float): + Specifies the fraction of operations to write to + `Stackdriver + Logging `__. This + field may contain any value between 0.0 and 1.0, inclusive. + 0.0 is the default and means that no operations are logged. + """ + + sampling_ratio = proto.Field( + proto.DOUBLE, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/types/target.py b/owl-bot-staging/v2/google/cloud/tasks_v2/types/target.py new file mode 100644 index 00000000..c2573691 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/types/target.py @@ -0,0 +1,548 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.tasks.v2', + manifest={ + 'HttpMethod', + 'HttpRequest', + 'AppEngineHttpRequest', + 'AppEngineRouting', + 'OAuthToken', + 'OidcToken', + }, +) + + +class HttpMethod(proto.Enum): + r"""The HTTP method used to deliver the task.""" + HTTP_METHOD_UNSPECIFIED = 0 + POST = 1 + GET = 2 + HEAD = 3 + PUT = 4 + DELETE = 5 + PATCH = 6 + OPTIONS = 7 + + +class HttpRequest(proto.Message): + r"""HTTP request. + + The task will be pushed to the worker as an HTTP request. If the + worker or the redirected worker acknowledges the task by returning a + successful HTTP response code ([``200`` - ``299``]), the task will + be removed from the queue. If any other HTTP response code is + returned or no response is received, the task will be retried + according to the following: + + - User-specified throttling: [retry + configuration][google.cloud.tasks.v2.Queue.retry_config], [rate + limits][google.cloud.tasks.v2.Queue.rate_limits], and the + [queue's state][google.cloud.tasks.v2.Queue.state]. + + - System throttling: To prevent the worker from overloading, Cloud + Tasks may temporarily reduce the queue's effective rate. + User-specified settings will not be changed. + + System throttling happens because: + + - Cloud Tasks backs off on all errors. Normally the backoff + specified in [rate + limits][google.cloud.tasks.v2.Queue.rate_limits] will be used. + But if the worker returns ``429`` (Too Many Requests), ``503`` + (Service Unavailable), or the rate of errors is high, Cloud Tasks + will use a higher backoff rate. The retry specified in the + ``Retry-After`` HTTP response header is considered. 
+ + - To prevent traffic spikes and to smooth sudden increases in + traffic, dispatches ramp up slowly when the queue is newly + created or idle and if large numbers of tasks suddenly become + available to dispatch (due to spikes in create task rates, the + queue being unpaused, or many tasks that are scheduled at the + same time). + + Attributes: + url (str): + Required. The full url path that the request will be sent + to. + + This string must begin with either "http://" or "https://". + Some examples are: ``http://acme.com`` and + ``https://acme.com/sales:8080``. Cloud Tasks will encode + some characters for safety and compatibility. The maximum + allowed URL length is 2083 characters after encoding. + + The ``Location`` header response from a redirect response + [``300`` - ``399``] may be followed. The redirect is not + counted as a separate attempt. + http_method (google.cloud.tasks_v2.types.HttpMethod): + The HTTP method to use for the request. The + default is POST. + headers (Sequence[google.cloud.tasks_v2.types.HttpRequest.HeadersEntry]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + + These headers represent a subset of the headers that will + accompany the task's HTTP request. Some HTTP request headers + will be ignored or replaced. + + A partial list of headers that will be ignored or replaced + is: + + - Host: This will be computed by Cloud Tasks and derived + from + [HttpRequest.url][google.cloud.tasks.v2.HttpRequest.url]. + - Content-Length: This will be computed by Cloud Tasks. + - User-Agent: This will be set to ``"Google-Cloud-Tasks"``. + - X-Google-*: Google use only. + - X-AppEngine-*: Google use only. + + ``Content-Type`` won't be set by Cloud Tasks. You can + explicitly set ``Content-Type`` to a media type when the + [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. 
+ For example, ``Content-Type`` can be set to + ``"application/octet-stream"`` or ``"application/json"``. + + Headers which can have multiple values (according to + RFC2616) can be specified using comma-separated values. + + The size of the headers must be less than 80KB. + body (bytes): + HTTP request body. + + A request body is allowed only if the [HTTP + method][google.cloud.tasks.v2.HttpRequest.http_method] is + POST, PUT, or PATCH. It is an error to set body on a task + with an incompatible + [HttpMethod][google.cloud.tasks.v2.HttpMethod]. + oauth_token (google.cloud.tasks_v2.types.OAuthToken): + If specified, an `OAuth + token `__ + will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization should generally only be used + when calling Google APIs hosted on \*.googleapis.com. + oidc_token (google.cloud.tasks_v2.types.OidcToken): + If specified, an + `OIDC `__ + token will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend + to validate the token yourself. + """ + + url = proto.Field( + proto.STRING, + number=1, + ) + http_method = proto.Field( + proto.ENUM, + number=2, + enum='HttpMethod', + ) + headers = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + body = proto.Field( + proto.BYTES, + number=4, + ) + oauth_token = proto.Field( + proto.MESSAGE, + number=5, + oneof='authorization_header', + message='OAuthToken', + ) + oidc_token = proto.Field( + proto.MESSAGE, + number=6, + oneof='authorization_header', + message='OidcToken', + ) + + +class AppEngineHttpRequest(proto.Message): + r"""App Engine HTTP request. + + The message defines the HTTP request that is sent to an App Engine + app when the task is dispatched. 
+ + Using + [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + The task will be delivered to the App Engine app which belongs to + the same project as the queue. For more information, see `How + Requests are + Routed `__ + and how routing is affected by `dispatch + files `__. + Traffic is encrypted during transport and never leaves Google + datacenters. Because this traffic is carried over a communication + mechanism internal to Google, you cannot explicitly set the protocol + (for example, HTTP or HTTPS). The request to the handler, however, + will appear to have used the HTTP protocol. + + The [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting] used + to construct the URL that the task is delivered to can be set at the + queue-level or task-level: + + - If [app_engine_routing_override is set on the + queue][Queue.app_engine_routing_override], this value is used for + all tasks in the queue, no matter what the setting is for the + [task-level + app_engine_routing][AppEngineHttpRequest.app_engine_routing]. + + The ``url`` that the task will be sent to is: + + - ``url =`` [host][google.cloud.tasks.v2.AppEngineRouting.host] + ``+`` + [relative_uri][google.cloud.tasks.v2.AppEngineHttpRequest.relative_uri] + + Tasks can be dispatched to secure app handlers, unsecure app + handlers, and URIs restricted with + ```login: admin`` `__. + Because tasks are not run as any user, they cannot be dispatched to + URIs restricted with + ```login: required`` `__ + Task dispatches also do not follow redirects. + + The task attempt has succeeded if the app's request handler returns + an HTTP response code in the range [``200`` - ``299``]. 
The task + attempt has failed if the app's handler returns a non-2xx response + code or Cloud Tasks does not receive response before the + [deadline][google.cloud.tasks.v2.Task.dispatch_deadline]. Failed + tasks will be retried according to the [retry + configuration][google.cloud.tasks.v2.Queue.retry_config]. ``503`` + (Service Unavailable) is considered an App Engine system error + instead of an application error and will cause Cloud Tasks' traffic + congestion control to temporarily throttle the queue's dispatches. + Unlike other types of task targets, a ``429`` (Too Many Requests) + response from an app handler does not cause traffic congestion + control to throttle the queue. + + Attributes: + http_method (google.cloud.tasks_v2.types.HttpMethod): + The HTTP method to use for the request. The default is POST. + + The app's request handler for the task's target URL must be + able to handle HTTP requests with this http_method, + otherwise the task attempt will fail with error code 405 + (Method Not Allowed). See `Writing a push task request + handler `__ + and the documentation for the request handlers in the + language your app is written in e.g. `Python Request + Handler `__. + app_engine_routing (google.cloud.tasks_v2.types.AppEngineRouting): + Task-level setting for App Engine routing. + + - If [app_engine_routing_override is set on the + queue][Queue.app_engine_routing_override], this value is + used for all tasks in the queue, no matter what the + setting is for the [task-level + app_engine_routing][AppEngineHttpRequest.app_engine_routing]. + relative_uri (str): + The relative URI. + The relative URI must begin with "/" and must be + a valid HTTP relative URI. It can contain a path + and query string arguments. If the relative URI + is empty, then the root path "/" will be used. + No spaces are allowed, and the maximum length + allowed is 2083 characters. 
+ headers (Sequence[google.cloud.tasks_v2.types.AppEngineHttpRequest.HeadersEntry]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2.CloudTasks.CreateTask]. + Repeated headers are not supported but a header value can + contain commas. + + Cloud Tasks sets some headers to default values: + + - ``User-Agent``: By default, this header is + ``"AppEngine-Google; (+http://code.google.com/appengine)"``. + This header can be modified, but Cloud Tasks will append + ``"AppEngine-Google; (+http://code.google.com/appengine)"`` + to the modified ``User-Agent``. + + If the task has a + [body][google.cloud.tasks.v2.AppEngineHttpRequest.body], + Cloud Tasks sets the following headers: + + - ``Content-Type``: By default, the ``Content-Type`` header + is set to ``"application/octet-stream"``. The default can + be overridden by explicitly setting ``Content-Type`` to a + particular media type when the [task is + created][google.cloud.tasks.v2.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/json"``. + - ``Content-Length``: This is computed by Cloud Tasks. This + value is output only. It cannot be changed. + + The headers below cannot be set or overridden: + + - ``Host`` + - ``X-Google-*`` + - ``X-AppEngine-*`` + + In addition, Cloud Tasks sets some headers when the task is + dispatched, such as headers containing information about the + task; see `request + headers `__. + These headers are set only when the task is dispatched, so + they are not visible when the task is returned in a Cloud + Tasks response. + + Although there is no specific limit for the maximum number + of headers or the size, there is a limit on the maximum size + of the [Task][google.cloud.tasks.v2.Task]. For more + information, see the + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] + documentation. + body (bytes): + HTTP request body. 
+ + A request body is allowed only if the HTTP method is POST or + PUT. It is an error to set a body on a task with an + incompatible [HttpMethod][google.cloud.tasks.v2.HttpMethod]. + """ + + http_method = proto.Field( + proto.ENUM, + number=1, + enum='HttpMethod', + ) + app_engine_routing = proto.Field( + proto.MESSAGE, + number=2, + message='AppEngineRouting', + ) + relative_uri = proto.Field( + proto.STRING, + number=3, + ) + headers = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + body = proto.Field( + proto.BYTES, + number=5, + ) + + +class AppEngineRouting(proto.Message): + r"""App Engine Routing. + + Defines routing characteristics specific to App Engine - service, + version, and instance. + + For more information about services, versions, and instances see `An + Overview of App + Engine `__, + `Microservices Architecture on Google App + Engine `__, + `App Engine Standard request + routing `__, + and `App Engine Flex request + routing `__. + + Using [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + Attributes: + service (str): + App service. + + By default, the task is sent to the service which is the + default service when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2.AppEngineRouting.host] is not + parsable into + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. 
If + [host][google.cloud.tasks.v2.AppEngineRouting.host] is not + parsable, then + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance] + are the empty string. + version (str): + App version. + + By default, the task is sent to the version which is the + default version when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2.AppEngineRouting.host] is not + parsable into + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2.AppEngineRouting.host] is not + parsable, then + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance] + are the empty string. + instance (str): + App instance. + + By default, the task is sent to an instance which is + available when the task is attempted. + + Requests can only be sent to a specific instance if `manual + scaling is used in App Engine + Standard `__. + App Engine Flex does not support instances. For more + information, see `App Engine Standard request + routing `__ + and `App Engine Flex request + routing `__. + host (str): + Output only. The host that the task is sent to. 
+ + The host is constructed from the domain name of the app + associated with the queue's project ID (for example + .appspot.com), and the + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. + Tasks which were created using the App Engine SDK might have + a custom domain name. + + For more information, see `How Requests are + Routed `__. + """ + + service = proto.Field( + proto.STRING, + number=1, + ) + version = proto.Field( + proto.STRING, + number=2, + ) + instance = proto.Field( + proto.STRING, + number=3, + ) + host = proto.Field( + proto.STRING, + number=4, + ) + + +class OAuthToken(proto.Message): + r"""Contains information needed for generating an `OAuth + token `__. + This type of authorization should generally only be used when + calling Google APIs hosted on \*.googleapis.com. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OAuth token. The service account + must be within the same project as the queue. The caller + must have iam.serviceAccounts.actAs permission for the + service account. + scope (str): + OAuth scope to be used for generating OAuth + access token. If not specified, + "https://www.googleapis.com/auth/cloud-platform" + will be used. + """ + + service_account_email = proto.Field( + proto.STRING, + number=1, + ) + scope = proto.Field( + proto.STRING, + number=2, + ) + + +class OidcToken(proto.Message): + r"""Contains information needed for generating an `OpenID Connect + token `__. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the + token yourself. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OIDC token. The service account + must be within the same project as the queue. 
The caller + must have iam.serviceAccounts.actAs permission for the + service account. + audience (str): + Audience to be used when generating OIDC + token. If not specified, the URI specified in + target will be used. + """ + + service_account_email = proto.Field( + proto.STRING, + number=1, + ) + audience = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/types/task.py b/owl-bot-staging/v2/google/cloud/tasks_v2/types/task.py new file mode 100644 index 00000000..69c7c3ad --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/tasks_v2/types/task.py @@ -0,0 +1,262 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.tasks_v2.types import target +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.tasks.v2', + manifest={ + 'Task', + 'Attempt', + }, +) + + +class Task(proto.Message): + r"""A unit of scheduled work. + Attributes: + name (str): + Optionally caller-specified in + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + + The task name. 
+ + The task name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the task's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + - ``TASK_ID`` can contain only letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), or underscores (_). The maximum + length is 500 characters. + app_engine_http_request (google.cloud.tasks_v2.types.AppEngineHttpRequest): + HTTP request that is sent to the App Engine app handler. + + An App Engine task is a task that has + [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest] + set. + http_request (google.cloud.tasks_v2.types.HttpRequest): + HTTP request that is sent to the worker. + + An HTTP task is a task that has + [HttpRequest][google.cloud.tasks.v2.HttpRequest] set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the task is scheduled to be attempted or + retried. + + ``schedule_time`` will be truncated to the nearest + microsecond. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that the task was created. + + ``create_time`` will be truncated to the nearest second. + dispatch_deadline (google.protobuf.duration_pb2.Duration): + The deadline for requests sent to the worker. If the worker + does not respond by this deadline then the request is + cancelled and the attempt is marked as a + ``DEADLINE_EXCEEDED`` failure. 
Cloud Tasks will retry the + task according to the + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + Note that when the request is cancelled, Cloud Tasks will + stop listing for the response, but whether the worker stops + processing depends on the worker. For example, if the worker + is stuck, it may not react to cancelled requests. + + The default and maximum values depend on the type of + request: + + - For [HTTP tasks][google.cloud.tasks.v2.HttpRequest], the + default is 10 minutes. The deadline must be in the + interval [15 seconds, 30 minutes]. + + - For [App Engine + tasks][google.cloud.tasks.v2.AppEngineHttpRequest], 0 + indicates that the request has the default deadline. The + default deadline depends on the `scaling + type `__ + of the service: 10 minutes for standard apps with + automatic scaling, 24 hours for standard apps with manual + and basic scaling, and 60 minutes for flex apps. If the + request deadline is set, it must be in the interval [15 + seconds, 24 hours 15 seconds]. Regardless of the task's + ``dispatch_deadline``, the app handler will not run for + longer than than the service's timeout. We recommend + setting the ``dispatch_deadline`` to at most a few + seconds more than the app handler's timeout. For more + information see + `Timeouts `__. + + ``dispatch_deadline`` will be truncated to the nearest + millisecond. The deadline is an approximate deadline. + dispatch_count (int): + Output only. The number of attempts + dispatched. + This count includes attempts which have been + dispatched but haven't received a response. + response_count (int): + Output only. The number of attempts which + have received a response. + first_attempt (google.cloud.tasks_v2.types.Attempt): + Output only. The status of the task's first attempt. + + Only + [dispatch_time][google.cloud.tasks.v2.Attempt.dispatch_time] + will be set. The other + [Attempt][google.cloud.tasks.v2.Attempt] information is not + retained by Cloud Tasks. 
+ last_attempt (google.cloud.tasks_v2.types.Attempt): + Output only. The status of the task's last + attempt. + view (google.cloud.tasks_v2.types.Task.View): + Output only. The view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] has been returned. + """ + class View(proto.Enum): + r"""The view specifies a subset of [Task][google.cloud.tasks.v2.Task] + data. + + When a task is returned in a response, not all information is + retrieved by default because some data, such as payloads, might be + desirable to return only when needed because of its large size or + because of the sensitivity of data that it contains. + """ + VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + name = proto.Field( + proto.STRING, + number=1, + ) + app_engine_http_request = proto.Field( + proto.MESSAGE, + number=2, + oneof='message_type', + message=target.AppEngineHttpRequest, + ) + http_request = proto.Field( + proto.MESSAGE, + number=3, + oneof='message_type', + message=target.HttpRequest, + ) + schedule_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + create_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + dispatch_deadline = proto.Field( + proto.MESSAGE, + number=6, + message=duration_pb2.Duration, + ) + dispatch_count = proto.Field( + proto.INT32, + number=7, + ) + response_count = proto.Field( + proto.INT32, + number=8, + ) + first_attempt = proto.Field( + proto.MESSAGE, + number=9, + message='Attempt', + ) + last_attempt = proto.Field( + proto.MESSAGE, + number=10, + message='Attempt', + ) + view = proto.Field( + proto.ENUM, + number=11, + enum=View, + ) + + +class Attempt(proto.Message): + r"""The status of a task attempt. + Attributes: + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt was scheduled. + + ``schedule_time`` will be truncated to the nearest + microsecond. 
+ dispatch_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt was dispatched. + + ``dispatch_time`` will be truncated to the nearest + microsecond. + response_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt response was + received. + + ``response_time`` will be truncated to the nearest + microsecond. + response_status (google.rpc.status_pb2.Status): + Output only. The response from the worker for this attempt. + + If ``response_time`` is unset, then the task has not been + attempted or is currently running and the + ``response_status`` field is meaningless. + """ + + schedule_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + dispatch_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + response_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + response_status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini new file mode 100644 index 00000000..4505b485 --- /dev/null +++ b/owl-bot-staging/v2/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py new file mode 100644 index 00000000..e9ae5e3c --- /dev/null +++ b/owl-bot-staging/v2/noxfile.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", +] + +@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/tasks_v2/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python='3.7') +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=['3.6', '3.7']) +def mypy(session): + """Run the type checker.""" + session.install('mypy', 'types-pkg_resources') + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python='3.6') +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/owl-bot-staging/v2/scripts/fixup_tasks_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_tasks_v2_keywords.py new file mode 100644 index 00000000..fdfc7b38 --- /dev/null +++ b/owl-bot-staging/v2/scripts/fixup_tasks_v2_keywords.py @@ -0,0 +1,191 @@ +#! 
/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class tasksCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_queue': ('parent', 'queue', ), + 'create_task': ('parent', 'task', 'response_view', ), + 'delete_queue': ('name', ), + 'delete_task': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_queue': ('name', ), + 'get_task': ('name', 'response_view', ), + 'list_queues': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_tasks': ('parent', 'response_view', 'page_size', 'page_token', ), + 'pause_queue': ('name', ), + 'purge_queue': ('name', ), + 'resume_queue': ('name', ), + 'run_task': ('name', 'response_view', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_queue': ('queue', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: 
cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=tasksCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. 
+ tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the tasks client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py new file mode 100644 index 00000000..2254eec1 --- /dev/null +++ b/owl-bot-staging/v2/setup.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-tasks', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.15.0', + 'packaging >= 14.3', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2/tests/unit/gapic/tasks_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/tasks_v2/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/tasks_v2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/tasks_v2/test_cloud_tasks.py b/owl-bot-staging/v2/tests/unit/gapic/tasks_v2/test_cloud_tasks.py new file mode 100644 index 00000000..c2ea1d26 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/tasks_v2/test_cloud_tasks.py @@ -0,0 +1,5181 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksAsyncClient +from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksClient +from google.cloud.tasks_v2.services.cloud_tasks import pagers +from google.cloud.tasks_v2.services.cloud_tasks import transports +from google.cloud.tasks_v2.services.cloud_tasks.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import target +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" 
tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudTasksClient._get_default_mtls_endpoint(None) is None + assert CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + CloudTasksClient, + CloudTasksAsyncClient, +]) +def test_cloud_tasks_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + 
factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'cloudtasks.googleapis.com:443' + + +@pytest.mark.parametrize("client_class", [ + CloudTasksClient, + CloudTasksAsyncClient, +]) +def test_cloud_tasks_client_service_account_always_use_jwt(client_class): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + +@pytest.mark.parametrize("client_class", [ + CloudTasksClient, + CloudTasksAsyncClient, +]) +def test_cloud_tasks_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'cloudtasks.googleapis.com:443' + + +def test_cloud_tasks_client_get_transport_class(): + transport = CloudTasksClient.get_transport_class() + available_transports = [ + transports.CloudTasksGrpcTransport, + ] + assert transport in available_transports + + transport = CloudTasksClient.get_transport_class("grpc") + assert transport == transports.CloudTasksGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), +]) 
+@mock.patch.object(CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)) +@mock.patch.object(CloudTasksAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksAsyncClient)) +def test_cloud_tasks_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudTasksClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudTasksClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "true"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "false"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)) 
+@mock.patch.object(CloudTasksAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_tasks_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_cloud_tasks_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_cloud_tasks_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_cloud_tasks_client_client_options_from_dict(): + with mock.patch('google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = CloudTasksClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_queues(transport: str = 'grpc', request_type=cloudtasks.ListQueuesRequest): + client = 
CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_queues_from_dict(): + test_list_queues(request_type=dict) + + +def test_list_queues_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + client.list_queues() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + +@pytest.mark.asyncio +async def test_list_queues_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ListQueuesRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_queues_async_from_dict(): + await test_list_queues_async(request_type=dict) + + +def test_list_queues_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + call.return_value = cloudtasks.ListQueuesResponse() + client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_queues_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse()) + await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_queues_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_queues( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_queues_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_queues( + cloudtasks.ListQueuesRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_queues_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_queues( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_queues_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_queues( + cloudtasks.ListQueuesRequest(), + parent='parent_value', + ) + + +def test_list_queues_pager(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token='abc', + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token='def', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token='ghi', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_queues(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, queue.Queue) + for i in results) + +def test_list_queues_pages(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token='abc', + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token='def', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token='ghi', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + pages = list(client.list_queues(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_queues_async_pager(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token='abc', + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token='def', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token='ghi', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_queues(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, queue.Queue) + for i in responses) + +@pytest.mark.asyncio +async def test_list_queues_async_pages(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_queues), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token='abc', + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token='def', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token='ghi', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_queues(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_queue(transport: str = 'grpc', request_type=cloudtasks.GetQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + ) + response = client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + + +def test_get_queue_from_dict(): + test_get_queue(request_type=dict) + + +def test_get_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + client.get_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + +@pytest.mark.asyncio +async def test_get_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.GetQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + )) + response = await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_get_queue_async_from_dict(): + await test_get_queue_async(request_type=dict) + + +def test_get_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + call.return_value = queue.Queue() + client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_queue( + cloudtasks.GetQueueRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_queue( + cloudtasks.GetQueueRequest(), + name='name_value', + ) + + +def test_create_queue(transport: str = 'grpc', request_type=cloudtasks.CreateQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name='name_value', + state=gct_queue.Queue.State.RUNNING, + ) + response = client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == 'name_value' + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_create_queue_from_dict(): + test_create_queue(request_type=dict) + + +def test_create_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + client.create_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + +@pytest.mark.asyncio +async def test_create_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CreateQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue( + name='name_value', + state=gct_queue.Queue.State.RUNNING, + )) + response = await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == 'name_value' + assert response.state == gct_queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_create_queue_async_from_dict(): + await test_create_queue_async(request_type=dict) + + +def test_create_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloudtasks.CreateQueueRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + call.return_value = gct_queue.Queue() + client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gct_queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_queue( + parent='parent_value', + queue=gct_queue.Queue(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].queue == gct_queue.Queue(name='name_value') + + +def test_create_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_queue( + cloudtasks.CreateQueueRequest(), + parent='parent_value', + queue=gct_queue.Queue(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_queue( + parent='parent_value', + queue=gct_queue.Queue(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].queue == gct_queue.Queue(name='name_value') + + +@pytest.mark.asyncio +async def test_create_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_queue( + cloudtasks.CreateQueueRequest(), + parent='parent_value', + queue=gct_queue.Queue(name='name_value'), + ) + + +def test_update_queue(transport: str = 'grpc', request_type=cloudtasks.UpdateQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name='name_value', + state=gct_queue.Queue.State.RUNNING, + ) + response = client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == 'name_value' + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_update_queue_from_dict(): + test_update_queue(request_type=dict) + + +def test_update_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + client.update_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + +@pytest.mark.asyncio +async def test_update_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.UpdateQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue( + name='name_value', + state=gct_queue.Queue.State.RUNNING, + )) + response = await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == 'name_value' + assert response.state == gct_queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_update_queue_async_from_dict(): + await test_update_queue_async(request_type=dict) + + +def test_update_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloudtasks.UpdateQueueRequest() + + request.queue.name = 'queue.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + call.return_value = gct_queue.Queue() + client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'queue.name=queue.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + + request.queue.name = 'queue.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'queue.name=queue.name/value', + ) in kw['metadata'] + + +def test_update_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gct_queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_queue( + queue=gct_queue.Queue(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].queue == gct_queue.Queue(name='name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +def test_update_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_queue( + queue=gct_queue.Queue(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].queue == gct_queue.Queue(name='name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_queue(transport: str = 'grpc', request_type=cloudtasks.DeleteQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_from_dict(): + test_delete_queue(request_type=dict) + + +def test_delete_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + client.delete_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + +@pytest.mark.asyncio +async def test_delete_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.DeleteQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_queue_async_from_dict(): + await test_delete_queue_async(request_type=dict) + + +def test_delete_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + call.return_value = None + client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name='name_value', + ) + + +def test_purge_queue(transport: str = 'grpc', request_type=cloudtasks.PurgeQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + ) + response = client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + + +def test_purge_queue_from_dict(): + test_purge_queue(request_type=dict) + + +def test_purge_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + client.purge_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + +@pytest.mark.asyncio +async def test_purge_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.PurgeQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + )) + response = await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_purge_queue_async_from_dict(): + await test_purge_queue_async(request_type=dict) + + +def test_purge_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + call.return_value = queue.Queue() + client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_purge_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_purge_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.purge_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_purge_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.purge_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name='name_value', + ) + + +def test_pause_queue(transport: str = 'grpc', request_type=cloudtasks.PauseQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + ) + response = client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + + +def test_pause_queue_from_dict(): + test_pause_queue(request_type=dict) + + +def test_pause_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + client.pause_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + +@pytest.mark.asyncio +async def test_pause_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.PauseQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + )) + response = await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_pause_queue_async_from_dict(): + await test_pause_queue_async(request_type=dict) + + +def test_pause_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
@pytest.mark.asyncio
async def test_pause_queue_field_headers_async():
    """Verify the async client sends the x-goog-request-params header for PauseQueue."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloudtasks.PauseQueueRequest()
    request.name = 'name/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.pause_queue),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
        await client.pause_queue(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']


def test_pause_queue_flattened():
    """Verify flattened (keyword-argument) invocation populates the request."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.pause_queue),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = queue.Queue()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.pause_queue(
            name='name_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == 'name_value'


def test_pause_queue_flattened_error():
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.pause_queue(
            cloudtasks.PauseQueueRequest(),
            name='name_value',
        )


@pytest.mark.asyncio
async def test_pause_queue_flattened_async():
    """Verify flattened invocation on the async client."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.pause_queue),
            '__call__') as call:
        # Designate an appropriate return value for the call. Only the
        # awaitable wrapper is assigned: a bare ``queue.Queue()`` assignment
        # immediately overwritten (as in older generated code) is dead code.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.pause_queue(
            name='name_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == 'name_value'


@pytest.mark.asyncio
async def test_pause_queue_flattened_error_async():
    """Verify the async client also rejects request-object + flattened fields."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.pause_queue(
            cloudtasks.PauseQueueRequest(),
            name='name_value',
        )


def test_resume_queue(transport: str = 'grpc', request_type=cloudtasks.ResumeQueueRequest):
    """Exercise ResumeQueue over a mocked gRPC stub and check the response mapping."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.resume_queue),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = queue.Queue(
            name='name_value',
            state=queue.Queue.State.RUNNING,
        )
        response = client.resume_queue(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudtasks.ResumeQueueRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, queue.Queue)
    assert response.name == 'name_value'
    assert response.state == queue.Queue.State.RUNNING


def test_resume_queue_from_dict():
    """Run the ResumeQueue test with a dict request body."""
    test_resume_queue(request_type=dict)


def test_resume_queue_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.resume_queue),
            '__call__') as call:
        client.resume_queue()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudtasks.ResumeQueueRequest()
@pytest.mark.asyncio
async def test_resume_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ResumeQueueRequest):
    """Exercise ResumeQueue on the async client over a mocked stub."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.resume_queue),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue(
            name='name_value',
            state=queue.Queue.State.RUNNING,
        ))
        response = await client.resume_queue(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudtasks.ResumeQueueRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, queue.Queue)
    assert response.name == 'name_value'
    assert response.state == queue.Queue.State.RUNNING


@pytest.mark.asyncio
async def test_resume_queue_async_from_dict():
    """Run the async ResumeQueue test with a dict request body."""
    await test_resume_queue_async(request_type=dict)


def test_resume_queue_field_headers():
    """Verify the sync client sends the x-goog-request-params header for ResumeQueue."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloudtasks.ResumeQueueRequest()
    request.name = 'name/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.resume_queue),
            '__call__') as call:
        call.return_value = queue.Queue()
        client.resume_queue(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']


@pytest.mark.asyncio
async def test_resume_queue_field_headers_async():
    """Verify the async client sends the x-goog-request-params header for ResumeQueue."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloudtasks.ResumeQueueRequest()
    request.name = 'name/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.resume_queue),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
        await client.resume_queue(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']


def test_resume_queue_flattened():
    """Verify flattened (keyword-argument) invocation populates the request."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.resume_queue),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = queue.Queue()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.resume_queue(
            name='name_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == 'name_value'


def test_resume_queue_flattened_error():
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.resume_queue(
            cloudtasks.ResumeQueueRequest(),
            name='name_value',
        )


@pytest.mark.asyncio
async def test_resume_queue_flattened_async():
    """Verify flattened invocation on the async client."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.resume_queue),
            '__call__') as call:
        # Designate an appropriate return value for the call. Only the
        # awaitable wrapper is assigned; a preceding bare assignment would be
        # dead code.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.resume_queue(
            name='name_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == 'name_value'


@pytest.mark.asyncio
async def test_resume_queue_flattened_error_async():
    """Verify the async client also rejects request-object + flattened fields."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.resume_queue(
            cloudtasks.ResumeQueueRequest(),
            name='name_value',
        )
def test_get_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.GetIamPolicyRequest):
    """Exercise GetIamPolicy over a mocked gRPC stub and check the response mapping."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_iam_policy),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy_pb2.Policy(
            version=774,
            etag=b'etag_blob',
        )
        response = client.get_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == iam_policy_pb2.GetIamPolicyRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, policy_pb2.Policy)
    assert response.version == 774
    assert response.etag == b'etag_blob'


def test_get_iam_policy_from_dict():
    """Run the GetIamPolicy test with a dict request body."""
    test_get_iam_policy(request_type=dict)


def test_get_iam_policy_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_iam_policy),
            '__call__') as call:
        client.get_iam_policy()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == iam_policy_pb2.GetIamPolicyRequest()


@pytest.mark.asyncio
async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
    """Exercise GetIamPolicy on the async client over a mocked stub."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_iam_policy),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
            version=774,
            etag=b'etag_blob',
        ))
        response = await client.get_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == iam_policy_pb2.GetIamPolicyRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, policy_pb2.Policy)
    assert response.version == 774
    assert response.etag == b'etag_blob'


@pytest.mark.asyncio
async def test_get_iam_policy_async_from_dict():
    """Run the async GetIamPolicy test with a dict request body."""
    await test_get_iam_policy_async(request_type=dict)


def test_get_iam_policy_field_headers():
    """Verify the sync client sends the x-goog-request-params header for GetIamPolicy."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = iam_policy_pb2.GetIamPolicyRequest()
    request.resource = 'resource/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_iam_policy),
            '__call__') as call:
        call.return_value = policy_pb2.Policy()
        client.get_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'resource=resource/value',
    ) in kw['metadata']


@pytest.mark.asyncio
async def test_get_iam_policy_field_headers_async():
    """Verify the async client sends the x-goog-request-params header for GetIamPolicy."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = iam_policy_pb2.GetIamPolicyRequest()
    request.resource = 'resource/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_iam_policy),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
        await client.get_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'resource=resource/value',
    ) in kw['metadata']


def test_get_iam_policy_from_dict_foreign():
    """Verify a GetIamPolicy request may be passed as a plain dict (foreign message)."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_iam_policy),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy_pb2.Policy()
        response = client.get_iam_policy(request={
            'resource': 'resource_value',
            'options': options_pb2.GetPolicyOptions(requested_policy_version=2598),
        })
        call.assert_called()


def test_get_iam_policy_flattened():
    """Verify flattened (keyword-argument) invocation populates the request."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_iam_policy),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy_pb2.Policy()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_iam_policy(
            resource='resource_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].resource == 'resource_value'


def test_get_iam_policy_flattened_error():
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_iam_policy(
            iam_policy_pb2.GetIamPolicyRequest(),
            resource='resource_value',
        )


@pytest.mark.asyncio
async def test_get_iam_policy_flattened_async():
    """Verify flattened invocation on the async client."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_iam_policy),
            '__call__') as call:
        # Designate an appropriate return value for the call. Only the
        # awaitable wrapper is assigned; a preceding bare assignment would be
        # dead code.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_iam_policy(
            resource='resource_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].resource == 'resource_value'
@pytest.mark.asyncio
async def test_get_iam_policy_flattened_error_async():
    """Verify the async client rejects request-object + flattened fields for GetIamPolicy."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_iam_policy(
            iam_policy_pb2.GetIamPolicyRequest(),
            resource='resource_value',
        )


def test_set_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.SetIamPolicyRequest):
    """Exercise SetIamPolicy over a mocked gRPC stub and check the response mapping."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.set_iam_policy),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy_pb2.Policy(
            version=774,
            etag=b'etag_blob',
        )
        response = client.set_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == iam_policy_pb2.SetIamPolicyRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, policy_pb2.Policy)
    assert response.version == 774
    assert response.etag == b'etag_blob'


def test_set_iam_policy_from_dict():
    """Run the SetIamPolicy test with a dict request body."""
    test_set_iam_policy(request_type=dict)


def test_set_iam_policy_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.set_iam_policy),
            '__call__') as call:
        client.set_iam_policy()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == iam_policy_pb2.SetIamPolicyRequest()


@pytest.mark.asyncio
async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
    """Exercise SetIamPolicy on the async client over a mocked stub."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.set_iam_policy),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
            version=774,
            etag=b'etag_blob',
        ))
        response = await client.set_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == iam_policy_pb2.SetIamPolicyRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, policy_pb2.Policy)
    assert response.version == 774
    assert response.etag == b'etag_blob'


@pytest.mark.asyncio
async def test_set_iam_policy_async_from_dict():
    """Run the async SetIamPolicy test with a dict request body."""
    await test_set_iam_policy_async(request_type=dict)


def test_set_iam_policy_field_headers():
    """Verify the sync client sends the x-goog-request-params header for SetIamPolicy."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = iam_policy_pb2.SetIamPolicyRequest()
    request.resource = 'resource/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.set_iam_policy),
            '__call__') as call:
        call.return_value = policy_pb2.Policy()
        client.set_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'resource=resource/value',
    ) in kw['metadata']


@pytest.mark.asyncio
async def test_set_iam_policy_field_headers_async():
    """Verify the async client sends the x-goog-request-params header for SetIamPolicy."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = iam_policy_pb2.SetIamPolicyRequest()
    request.resource = 'resource/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.set_iam_policy),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
        await client.set_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'resource=resource/value',
    ) in kw['metadata']


def test_set_iam_policy_from_dict_foreign():
    """Verify a SetIamPolicy request may be passed as a plain dict (foreign message)."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.set_iam_policy),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy_pb2.Policy()
        response = client.set_iam_policy(request={
            'resource': 'resource_value',
            'policy': policy_pb2.Policy(version=774),
        })
        call.assert_called()


def test_set_iam_policy_flattened():
    """Verify flattened (keyword-argument) invocation populates the request."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.set_iam_policy),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy_pb2.Policy()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.set_iam_policy(
            resource='resource_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].resource == 'resource_value'


def test_set_iam_policy_flattened_error():
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.set_iam_policy(
            iam_policy_pb2.SetIamPolicyRequest(),
            resource='resource_value',
        )
@pytest.mark.asyncio
async def test_set_iam_policy_flattened_async():
    """Verify flattened SetIamPolicy invocation on the async client."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.set_iam_policy),
            '__call__') as call:
        # Designate an appropriate return value for the call. Only the
        # awaitable wrapper is assigned; a preceding bare assignment would be
        # dead code.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.set_iam_policy(
            resource='resource_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].resource == 'resource_value'


@pytest.mark.asyncio
async def test_set_iam_policy_flattened_error_async():
    """Verify the async client rejects request-object + flattened fields for SetIamPolicy."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.set_iam_policy(
            iam_policy_pb2.SetIamPolicyRequest(),
            resource='resource_value',
        )


def test_test_iam_permissions(transport: str = 'grpc', request_type=iam_policy_pb2.TestIamPermissionsRequest):
    """Exercise TestIamPermissions over a mocked gRPC stub and check the response mapping."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.test_iam_permissions),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = iam_policy_pb2.TestIamPermissionsResponse(
            permissions=['permissions_value'],
        )
        response = client.test_iam_permissions(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == iam_policy_pb2.TestIamPermissionsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
    assert response.permissions == ['permissions_value']


def test_test_iam_permissions_from_dict():
    """Run the TestIamPermissions test with a dict request body."""
    test_test_iam_permissions(request_type=dict)


def test_test_iam_permissions_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.test_iam_permissions),
            '__call__') as call:
        client.test_iam_permissions()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == iam_policy_pb2.TestIamPermissionsRequest()


@pytest.mark.asyncio
async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest):
    """Exercise TestIamPermissions on the async client over a mocked stub."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.test_iam_permissions),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
            permissions=['permissions_value'],
        ))
        response = await client.test_iam_permissions(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == iam_policy_pb2.TestIamPermissionsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
    assert response.permissions == ['permissions_value']


@pytest.mark.asyncio
async def test_test_iam_permissions_async_from_dict():
    """Run the async TestIamPermissions test with a dict request body."""
    await test_test_iam_permissions_async(request_type=dict)


def test_test_iam_permissions_field_headers():
    """Verify the sync client sends the x-goog-request-params header for TestIamPermissions."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = iam_policy_pb2.TestIamPermissionsRequest()
    request.resource = 'resource/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.test_iam_permissions),
            '__call__') as call:
        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
        client.test_iam_permissions(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'resource=resource/value',
    ) in kw['metadata']


@pytest.mark.asyncio
async def test_test_iam_permissions_field_headers_async():
    """Verify the async client sends the x-goog-request-params header for TestIamPermissions."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = iam_policy_pb2.TestIamPermissionsRequest()
    request.resource = 'resource/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.test_iam_permissions),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
        await client.test_iam_permissions(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'resource=resource/value',
    ) in kw['metadata']


def test_test_iam_permissions_from_dict_foreign():
    """Verify a TestIamPermissions request may be passed as a plain dict (foreign message)."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.test_iam_permissions),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
        response = client.test_iam_permissions(request={
            'resource': 'resource_value',
            'permissions': ['permissions_value'],
        })
        call.assert_called()


def test_test_iam_permissions_flattened():
    """Verify flattened (keyword-argument) invocation populates the request."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.test_iam_permissions),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.test_iam_permissions(
            resource='resource_value',
            permissions=['permissions_value'],
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].resource == 'resource_value'
        assert args[0].permissions == ['permissions_value']
def test_test_iam_permissions_flattened_error():
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.test_iam_permissions(
            iam_policy_pb2.TestIamPermissionsRequest(),
            resource='resource_value',
            permissions=['permissions_value'],
        )


@pytest.mark.asyncio
async def test_test_iam_permissions_flattened_async():
    """Verify flattened TestIamPermissions invocation on the async client."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.test_iam_permissions),
            '__call__') as call:
        # Designate an appropriate return value for the call. Only the
        # awaitable wrapper is assigned; a preceding bare assignment would be
        # dead code.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.test_iam_permissions(
            resource='resource_value',
            permissions=['permissions_value'],
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].resource == 'resource_value'
        assert args[0].permissions == ['permissions_value']


@pytest.mark.asyncio
async def test_test_iam_permissions_flattened_error_async():
    """Verify the async client rejects request-object + flattened fields."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.test_iam_permissions(
            iam_policy_pb2.TestIamPermissionsRequest(),
            resource='resource_value',
            permissions=['permissions_value'],
        )


def test_list_tasks(transport: str = 'grpc', request_type=cloudtasks.ListTasksRequest):
    """Exercise ListTasks over a mocked gRPC stub and check pager mapping."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_tasks),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = cloudtasks.ListTasksResponse(
            next_page_token='next_page_token_value',
        )
        response = client.list_tasks(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudtasks.ListTasksRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListTasksPager)
    assert response.next_page_token == 'next_page_token_value'


def test_list_tasks_from_dict():
    """Run the ListTasks test with a dict request body."""
    test_list_tasks(request_type=dict)


def test_list_tasks_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_tasks),
            '__call__') as call:
        client.list_tasks()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudtasks.ListTasksRequest()


@pytest.mark.asyncio
async def test_list_tasks_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ListTasksRequest):
    """Exercise ListTasks on the async client over a mocked stub."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_tasks),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse(
            next_page_token='next_page_token_value',
        ))
        response = await client.list_tasks(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudtasks.ListTasksRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListTasksAsyncPager)
    assert response.next_page_token == 'next_page_token_value'


@pytest.mark.asyncio
async def test_list_tasks_async_from_dict():
    """Run the async ListTasks test with a dict request body."""
    await test_list_tasks_async(request_type=dict)


def test_list_tasks_field_headers():
    """Verify the sync client sends the x-goog-request-params header for ListTasks."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloudtasks.ListTasksRequest()
    request.parent = 'parent/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_tasks),
            '__call__') as call:
        call.return_value = cloudtasks.ListTasksResponse()
        client.list_tasks(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_tasks_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tasks( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_tasks_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tasks( + cloudtasks.ListTasksRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_tasks(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_tasks_flattened_error_async():
+ client = CloudTasksAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_tasks(
+ cloudtasks.ListTasksRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_tasks_pager():
+ client = CloudTasksClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_tasks),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ cloudtasks.ListTasksResponse(
+ tasks=[
+ task.Task(),
+ task.Task(),
+ task.Task(),
+ ],
+ next_page_token='abc',
+ ),
+ cloudtasks.ListTasksResponse(
+ tasks=[],
+ next_page_token='def',
+ ),
+ cloudtasks.ListTasksResponse(
+ tasks=[
+ task.Task(),
+ ],
+ next_page_token='ghi',
+ ),
+ cloudtasks.ListTasksResponse(
+ tasks=[
+ task.Task(),
+ task.Task(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_tasks(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, task.Task)
+ for i in results)
+
+def test_list_tasks_pages():
+ client = CloudTasksClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_tasks),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ cloudtasks.ListTasksResponse(
+ tasks=[
+ task.Task(),
+ task.Task(),
+ task.Task(),
+ ],
+ next_page_token='abc',
+ ),
+ cloudtasks.ListTasksResponse(
+ tasks=[],
+ next_page_token='def',
+ ),
+ cloudtasks.ListTasksResponse(
+ tasks=[
+ task.Task(),
+ ],
+ next_page_token='ghi',
+ ),
+ cloudtasks.ListTasksResponse(
+ tasks=[
+ task.Task(),
+ task.Task(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_tasks(request={}).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_tasks_async_pager():
+ client = CloudTasksAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_tasks),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ cloudtasks.ListTasksResponse(
+ tasks=[
+ task.Task(),
+ task.Task(),
+ task.Task(),
+ ],
+ next_page_token='abc',
+ ),
+ cloudtasks.ListTasksResponse(
+ tasks=[],
+ next_page_token='def',
+ ),
+ cloudtasks.ListTasksResponse(
+ tasks=[
+ task.Task(),
+ ],
+ next_page_token='ghi',
+ ),
+ cloudtasks.ListTasksResponse(
+ tasks=[
+ task.Task(),
+ task.Task(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_tasks(request={},)
+ assert async_pager.next_page_token == 'abc'
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, task.Task)
+ for i in responses)
+
+@pytest.mark.asyncio
+async def test_list_tasks_async_pages():
+ client = CloudTasksAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_tasks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token='abc', + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token='ghi', + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_tasks(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_task(transport: str = 'grpc', request_type=cloudtasks.GetTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name='name_value', + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), + ) + response = client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +def test_get_task_from_dict(): + test_get_task(request_type=dict) + + +def test_get_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + client.get_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + +@pytest.mark.asyncio +async def test_get_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.GetTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( + name='name_value', + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + )) + response = await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_get_task_async_from_dict(): + await test_get_task_async(request_type=dict) + + +def test_get_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + call.return_value = task.Task() + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_task( + cloudtasks.GetTaskRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_task( + cloudtasks.GetTaskRequest(), + name='name_value', + ) + + +def test_create_task(transport: str = 'grpc', request_type=cloudtasks.CreateTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task( + name='name_value', + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), + ) + response = client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_task.Task) + assert response.name == 'name_value' + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_from_dict(): + test_create_task(request_type=dict) + + +def test_create_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + client.create_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + +@pytest.mark.asyncio +async def test_create_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CreateTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task( + name='name_value', + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + )) + response = await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_task.Task) + assert response.name == 'name_value' + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == gct_task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_create_task_async_from_dict(): + await test_create_task_async(request_type=dict) + + +def test_create_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value = gct_task.Task() + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_task( + parent='parent_value', + task=gct_task.Task(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].task == gct_task.Task(name='name_value') + + +def test_create_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_task( + cloudtasks.CreateTaskRequest(), + parent='parent_value', + task=gct_task.Task(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gct_task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_task( + parent='parent_value', + task=gct_task.Task(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].task == gct_task.Task(name='name_value') + + +@pytest.mark.asyncio +async def test_create_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_task( + cloudtasks.CreateTaskRequest(), + parent='parent_value', + task=gct_task.Task(name='name_value'), + ) + + +def test_delete_task(transport: str = 'grpc', request_type=cloudtasks.DeleteTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_task_from_dict(): + test_delete_task(request_type=dict) + + +def test_delete_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + client.delete_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + +@pytest.mark.asyncio +async def test_delete_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.DeleteTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_task_async_from_dict(): + await test_delete_task_async(request_type=dict) + + +def test_delete_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + call.return_value = None + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_task( + cloudtasks.DeleteTaskRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_task( + cloudtasks.DeleteTaskRequest(), + name='name_value', + ) + + +def test_run_task(transport: str = 'grpc', request_type=cloudtasks.RunTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name='name_value', + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), + ) + response = client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +def test_run_task_from_dict(): + test_run_task(request_type=dict) + + +def test_run_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + client.run_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + +@pytest.mark.asyncio +async def test_run_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.RunTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( + name='name_value', + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + )) + response = await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_run_task_async_from_dict(): + await test_run_task_async(request_type=dict) + + +def test_run_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + call.return_value = task.Task() + client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_run_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_run_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_run_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_task( + cloudtasks.RunTaskRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_run_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.run_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_run_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.run_task( + cloudtasks.RunTaskRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudTasksClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudTasksGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudTasksGrpcTransport, + ) + +def test_cloud_tasks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudTasksTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_cloud_tasks_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.CloudTasksTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_queues', + 'get_queue', + 'create_queue', + 'update_queue', + 'delete_queue', + 'purge_queue', + 'pause_queue', + 'resume_queue', + 'get_iam_policy', + 'set_iam_policy', + 'test_iam_permissions', + 'list_tasks', + 'get_task', + 'create_task', + 'delete_task', + 'run_task', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_cloud_tasks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_tasks_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = 
(ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + quota_project_id="octopus", + ) + + +def test_cloud_tasks_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_cloud_tasks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudTasksClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_tasks_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudTasksClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_cloud_tasks_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_cloud_tasks_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudTasksGrpcTransport, grpc_helpers), + (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "cloudtasks.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="cloudtasks.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) +def test_cloud_tasks_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_cloud_tasks_host_no_port(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudtasks.googleapis.com'), + ) + assert client.transport._host == 'cloudtasks.googleapis.com:443' + + +def test_cloud_tasks_host_with_port(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudtasks.googleapis.com:8000'), + ) + assert client.transport._host == 'cloudtasks.googleapis.com:8000' + +def test_cloud_tasks_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_tasks_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudTasksGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) +def test_cloud_tasks_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + 
credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) +def test_cloud_tasks_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_queue_path(): + project = "squid" + location = "clam" + queue = "whelk" + expected = "projects/{project}/locations/{location}/queues/{queue}".format(project=project, location=location, queue=queue, ) + actual = 
CloudTasksClient.queue_path(project, location, queue) + assert expected == actual + + +def test_parse_queue_path(): + expected = { + "project": "octopus", + "location": "oyster", + "queue": "nudibranch", + } + path = CloudTasksClient.queue_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_queue_path(path) + assert expected == actual + +def test_task_path(): + project = "cuttlefish" + location = "mussel" + queue = "winkle" + task = "nautilus" + expected = "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(project=project, location=location, queue=queue, task=task, ) + actual = CloudTasksClient.task_path(project, location, queue, task) + assert expected == actual + + +def test_parse_task_path(): + expected = { + "project": "scallop", + "location": "abalone", + "queue": "squid", + "task": "clam", + } + path = CloudTasksClient.task_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_task_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = CloudTasksClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = CloudTasksClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudTasksClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = CloudTasksClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = CloudTasksClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CloudTasksClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = CloudTasksClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = CloudTasksClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = CloudTasksClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudTasksClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CloudTasksClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = CloudTasksClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.CloudTasksTransport, '_prep_wrapped_messages') as prep: + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.CloudTasksTransport, '_prep_wrapped_messages') as prep: + transport_class = CloudTasksClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v2beta2/.coveragerc b/owl-bot-staging/v2beta2/.coveragerc new file mode 100644 index 00000000..1d5bc53f --- /dev/null +++ b/owl-bot-staging/v2beta2/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/tasks/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v2beta2/MANIFEST.in b/owl-bot-staging/v2beta2/MANIFEST.in new file mode 100644 index 00000000..fa24e936 --- /dev/null +++ b/owl-bot-staging/v2beta2/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/tasks *.py +recursive-include google/cloud/tasks_v2beta2 *.py diff --git a/owl-bot-staging/v2beta2/README.rst b/owl-bot-staging/v2beta2/README.rst new file mode 100644 index 00000000..6171a7e2 --- /dev/null +++ b/owl-bot-staging/v2beta2/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Tasks API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Tasks API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2beta2/docs/conf.py b/owl-bot-staging/v2beta2/docs/conf.py new file mode 100644 index 00000000..62c563cc --- /dev/null +++ b/owl-bot-staging/v2beta2/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-tasks documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-tasks" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. 
+# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-tasks-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-tasks.tex", + u"google-cloud-tasks Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-tasks", + u"Google Cloud Tasks Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-tasks", + u"google-cloud-tasks Documentation", + author, + "google-cloud-tasks", + "GAPIC library for Google Cloud Tasks API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v2beta2/docs/index.rst b/owl-bot-staging/v2beta2/docs/index.rst new file mode 100644 index 00000000..56b75351 --- /dev/null +++ b/owl-bot-staging/v2beta2/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + tasks_v2beta2/services + tasks_v2beta2/types diff --git a/owl-bot-staging/v2beta2/docs/tasks_v2beta2/cloud_tasks.rst b/owl-bot-staging/v2beta2/docs/tasks_v2beta2/cloud_tasks.rst new file mode 100644 index 00000000..eacf8fb7 --- /dev/null +++ b/owl-bot-staging/v2beta2/docs/tasks_v2beta2/cloud_tasks.rst @@ -0,0 +1,10 @@ +CloudTasks +---------------------------- + +.. automodule:: google.cloud.tasks_v2beta2.services.cloud_tasks + :members: + :inherited-members: + +.. 
automodule:: google.cloud.tasks_v2beta2.services.cloud_tasks.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v2beta2/docs/tasks_v2beta2/services.rst b/owl-bot-staging/v2beta2/docs/tasks_v2beta2/services.rst new file mode 100644 index 00000000..4273c20e --- /dev/null +++ b/owl-bot-staging/v2beta2/docs/tasks_v2beta2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Tasks v2beta2 API +=========================================== +.. toctree:: + :maxdepth: 2 + + cloud_tasks diff --git a/owl-bot-staging/v2beta2/docs/tasks_v2beta2/types.rst b/owl-bot-staging/v2beta2/docs/tasks_v2beta2/types.rst new file mode 100644 index 00000000..4a688d27 --- /dev/null +++ b/owl-bot-staging/v2beta2/docs/tasks_v2beta2/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Tasks v2beta2 API +======================================== + +.. automodule:: google.cloud.tasks_v2beta2.types + :members: + :undoc-members: + :show-inheritance: diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks/__init__.py b/owl-bot-staging/v2beta2/google/cloud/tasks/__init__.py new file mode 100644 index 00000000..a26a8d59 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks/__init__.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.cloud.tasks_v2beta2.services.cloud_tasks.client import CloudTasksClient +from google.cloud.tasks_v2beta2.services.cloud_tasks.async_client import CloudTasksAsyncClient + +from google.cloud.tasks_v2beta2.types.cloudtasks import AcknowledgeTaskRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import CancelLeaseRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import CreateQueueRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import CreateTaskRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import DeleteQueueRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import DeleteTaskRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import GetQueueRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import GetTaskRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import LeaseTasksRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import LeaseTasksResponse +from google.cloud.tasks_v2beta2.types.cloudtasks import ListQueuesRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import ListQueuesResponse +from google.cloud.tasks_v2beta2.types.cloudtasks import ListTasksRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import ListTasksResponse +from google.cloud.tasks_v2beta2.types.cloudtasks import PauseQueueRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import PurgeQueueRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import RenewLeaseRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import ResumeQueueRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import RunTaskRequest +from google.cloud.tasks_v2beta2.types.cloudtasks import UpdateQueueRequest +from google.cloud.tasks_v2beta2.types.queue import Queue +from google.cloud.tasks_v2beta2.types.queue import QueueStats +from google.cloud.tasks_v2beta2.types.queue import RateLimits +from google.cloud.tasks_v2beta2.types.queue import RetryConfig +from google.cloud.tasks_v2beta2.types.target import 
AppEngineHttpRequest +from google.cloud.tasks_v2beta2.types.target import AppEngineHttpTarget +from google.cloud.tasks_v2beta2.types.target import AppEngineRouting +from google.cloud.tasks_v2beta2.types.target import PullMessage +from google.cloud.tasks_v2beta2.types.target import PullTarget +from google.cloud.tasks_v2beta2.types.target import HttpMethod +from google.cloud.tasks_v2beta2.types.task import AttemptStatus +from google.cloud.tasks_v2beta2.types.task import Task +from google.cloud.tasks_v2beta2.types.task import TaskStatus + +__all__ = ('CloudTasksClient', + 'CloudTasksAsyncClient', + 'AcknowledgeTaskRequest', + 'CancelLeaseRequest', + 'CreateQueueRequest', + 'CreateTaskRequest', + 'DeleteQueueRequest', + 'DeleteTaskRequest', + 'GetQueueRequest', + 'GetTaskRequest', + 'LeaseTasksRequest', + 'LeaseTasksResponse', + 'ListQueuesRequest', + 'ListQueuesResponse', + 'ListTasksRequest', + 'ListTasksResponse', + 'PauseQueueRequest', + 'PurgeQueueRequest', + 'RenewLeaseRequest', + 'ResumeQueueRequest', + 'RunTaskRequest', + 'UpdateQueueRequest', + 'Queue', + 'QueueStats', + 'RateLimits', + 'RetryConfig', + 'AppEngineHttpRequest', + 'AppEngineHttpTarget', + 'AppEngineRouting', + 'PullMessage', + 'PullTarget', + 'HttpMethod', + 'AttemptStatus', + 'Task', + 'TaskStatus', +) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks/py.typed b/owl-bot-staging/v2beta2/google/cloud/tasks/py.typed new file mode 100644 index 00000000..41f0b1b8 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. 
diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/__init__.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/__init__.py new file mode 100644 index 00000000..8fb2af2c --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/__init__.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.cloud_tasks import CloudTasksClient +from .services.cloud_tasks import CloudTasksAsyncClient + +from .types.cloudtasks import AcknowledgeTaskRequest +from .types.cloudtasks import CancelLeaseRequest +from .types.cloudtasks import CreateQueueRequest +from .types.cloudtasks import CreateTaskRequest +from .types.cloudtasks import DeleteQueueRequest +from .types.cloudtasks import DeleteTaskRequest +from .types.cloudtasks import GetQueueRequest +from .types.cloudtasks import GetTaskRequest +from .types.cloudtasks import LeaseTasksRequest +from .types.cloudtasks import LeaseTasksResponse +from .types.cloudtasks import ListQueuesRequest +from .types.cloudtasks import ListQueuesResponse +from .types.cloudtasks import ListTasksRequest +from .types.cloudtasks import ListTasksResponse +from .types.cloudtasks import PauseQueueRequest +from .types.cloudtasks import PurgeQueueRequest +from .types.cloudtasks import RenewLeaseRequest +from .types.cloudtasks import ResumeQueueRequest +from .types.cloudtasks import RunTaskRequest +from .types.cloudtasks import UpdateQueueRequest +from 
.types.queue import Queue +from .types.queue import QueueStats +from .types.queue import RateLimits +from .types.queue import RetryConfig +from .types.target import AppEngineHttpRequest +from .types.target import AppEngineHttpTarget +from .types.target import AppEngineRouting +from .types.target import PullMessage +from .types.target import PullTarget +from .types.target import HttpMethod +from .types.task import AttemptStatus +from .types.task import Task +from .types.task import TaskStatus + +__all__ = ( + 'CloudTasksAsyncClient', +'AcknowledgeTaskRequest', +'AppEngineHttpRequest', +'AppEngineHttpTarget', +'AppEngineRouting', +'AttemptStatus', +'CancelLeaseRequest', +'CloudTasksClient', +'CreateQueueRequest', +'CreateTaskRequest', +'DeleteQueueRequest', +'DeleteTaskRequest', +'GetQueueRequest', +'GetTaskRequest', +'HttpMethod', +'LeaseTasksRequest', +'LeaseTasksResponse', +'ListQueuesRequest', +'ListQueuesResponse', +'ListTasksRequest', +'ListTasksResponse', +'PauseQueueRequest', +'PullMessage', +'PullTarget', +'PurgeQueueRequest', +'Queue', +'QueueStats', +'RateLimits', +'RenewLeaseRequest', +'ResumeQueueRequest', +'RetryConfig', +'RunTaskRequest', +'Task', +'TaskStatus', +'UpdateQueueRequest', +) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/gapic_metadata.json b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/gapic_metadata.json new file mode 100644 index 00000000..777cbb14 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/gapic_metadata.json @@ -0,0 +1,223 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.tasks_v2beta2", + "protoPackage": "google.cloud.tasks.v2beta2", + "schema": "1.0", + "services": { + "CloudTasks": { + "clients": { + "grpc": { + "libraryClient": "CloudTasksClient", + "rpcs": { + "AcknowledgeTask": { + "methods": [ + "acknowledge_task" + ] + }, + "CancelLease": { + "methods": [ + 
"cancel_lease" + ] + }, + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "LeaseTasks": { + "methods": [ + "lease_tasks" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "RenewLease": { + "methods": [ + "renew_lease" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudTasksAsyncClient", + "rpcs": { + "AcknowledgeTask": { + "methods": [ + "acknowledge_task" + ] + }, + "CancelLease": { + "methods": [ + "cancel_lease" + ] + }, + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "LeaseTasks": { + "methods": [ + "lease_tasks" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + 
"RenewLease": { + "methods": [ + "renew_lease" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/py.typed b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/py.typed new file mode 100644 index 00000000..41f0b1b8 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/__init__.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py new file mode 100644 index 00000000..1478acb5 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import CloudTasksClient +from .async_client import CloudTasksAsyncClient + +__all__ = ( + 'CloudTasksClient', + 'CloudTasksAsyncClient', +) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py new file mode 100644 index 00000000..a544c2c9 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py @@ -0,0 +1,2249 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2beta2.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import target +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport +from .client import CloudTasksClient + + +class CloudTasksAsyncClient: + """Cloud Tasks allows developers to manage the execution of + background work in their applications. 
+ """ + + _client: CloudTasksClient + + DEFAULT_ENDPOINT = CloudTasksClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudTasksClient.DEFAULT_MTLS_ENDPOINT + + queue_path = staticmethod(CloudTasksClient.queue_path) + parse_queue_path = staticmethod(CloudTasksClient.parse_queue_path) + task_path = staticmethod(CloudTasksClient.task_path) + parse_task_path = staticmethod(CloudTasksClient.parse_task_path) + common_billing_account_path = staticmethod(CloudTasksClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(CloudTasksClient.parse_common_billing_account_path) + common_folder_path = staticmethod(CloudTasksClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudTasksClient.parse_common_folder_path) + common_organization_path = staticmethod(CloudTasksClient.common_organization_path) + parse_common_organization_path = staticmethod(CloudTasksClient.parse_common_organization_path) + common_project_path = staticmethod(CloudTasksClient.common_project_path) + parse_common_project_path = staticmethod(CloudTasksClient.parse_common_project_path) + common_location_path = staticmethod(CloudTasksClient.common_location_path) + parse_common_location_path = staticmethod(CloudTasksClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksAsyncClient: The constructed client. + """ + return CloudTasksClient.from_service_account_info.__func__(CloudTasksAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksAsyncClient: The constructed client. + """ + return CloudTasksClient.from_service_account_file.__func__(CloudTasksAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudTasksTransport: + """Returns the transport used by the client instance. + + Returns: + CloudTasksTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(CloudTasksClient).get_transport_class, type(CloudTasksClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, CloudTasksTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud tasks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CloudTasksClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_queues(self, + request: cloudtasks.ListQueuesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesAsyncPager: + r"""Lists queues. + Queues are returned in lexicographical order. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.ListQueuesRequest`): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.services.cloud_tasks.pagers.ListQueuesAsyncPager: + Response message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.ListQueuesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_queues, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListQueuesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_queue(self, + request: cloudtasks.GetQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.GetQueueRequest`): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. + name (:class:`str`): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.GetQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_queue, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_queue(self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.CreateQueueRequest`): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. + parent (:class:`str`): + Required. The location name in which the queue will be + created. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (:class:`google.cloud.tasks_v2beta2.types.Queue`): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta2.Queue.name] + cannot be the same as an existing queue. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.CreateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_queue(self, + request: cloudtasks.UpdateQueueRequest = None, + *, + queue: gct_queue.Queue = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.UpdateQueueRequest`): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue]. + queue (:class:`google.cloud.tasks_v2beta2.types.Queue`): + Required. The queue to create or update. + + The queue's + [name][google.cloud.tasks.v2beta2.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2beta2.Queue.name] cannot be + changed. 
+ + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.UpdateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("queue.name", request.queue.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_queue(self, + request: cloudtasks.DeleteQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.DeleteQueueRequest`): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.DeleteQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_queue, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def purge_queue(self, + request: cloudtasks.PurgeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.PurgeQueueRequest`): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. + name (:class:`str`): + Required. The queue name. 
For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.PurgeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.purge_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def pause_queue(self, + request: cloudtasks.PauseQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.PauseQueueRequest`): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.PauseQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def resume_queue(self, + request: cloudtasks.ResumeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta2.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.ResumeQueueRequest`): + The request object. 
Request message for + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.ResumeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy(self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). 
+ A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy(self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. 
See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions(self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`Sequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. 
+ + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_tasks(self, + request: cloudtasks.ListTasksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksAsyncPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.ListTasksRequest`): + The request object. Request message for listing tasks + using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.services.cloud_tasks.pagers.ListTasksAsyncPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tasks, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTasksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_task(self, + request: cloudtasks.GetTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.GetTaskRequest`): + The request object. Request message for getting a task + using + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. + name (:class:`str`): + Required. The task name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_task, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_task(self, + request: cloudtasks.CreateTaskRequest = None, + *, + parent: str = None, + task: gct_task.Task = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the + maximum task size is 100KB. + - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the + maximum task size is 1MB. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.CreateTaskRequest`): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`google.cloud.tasks_v2beta2.types.Task`): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta2.Task.name]. If a name + is not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2beta2.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. 
If a task's ID is identical to that of + an existing task or a task that was deleted or completed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or completed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + completed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_task(self, + request: cloudtasks.DeleteTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has completed + successfully or permanently failed. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.DeleteTaskRequest`): + The request object. Request message for deleting a task + using + [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_task, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def lease_tasks(self, + request: cloudtasks.LeaseTasksRequest = None, + *, + parent: str = None, + lease_duration: duration_pb2.Duration = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.LeaseTasksResponse: + r"""Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + after they have performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is + intended to store data that the worker needs to perform the work + associated with the task. To return the payloads in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per queue. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is + returned when this limit is exceeded. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also + returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.LeaseTasksRequest`): + The request object. Request message for leasing tasks + using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + parent (:class:`str`): + Required. The queue name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lease_duration (:class:`google.protobuf.duration_pb2.Duration`): + Required. The duration of the lease. + + Each task returned in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] + will have its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + set to the current time plus the ``lease_duration``. The + task is leased until its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]; + thus, the task will not be returned to another + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + call before its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + After the worker has successfully finished the work + associated with the task, the worker must call via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + before the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + Otherwise the task will be returned to a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + call so that another worker can retry it. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + + This corresponds to the ``lease_duration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.LeaseTasksResponse: + Response message for leasing tasks using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, lease_duration]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.LeaseTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if lease_duration is not None: + request.lease_duration = lease_duration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.lease_tasks, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def acknowledge_task(self, + request: cloudtasks.AcknowledgeTaskRequest = None, + *, + name: str = None, + schedule_time: timestamp_pb2.Timestamp = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Acknowledges a pull task. + + The worker, that is, the entity that + [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this + task must call this method to indicate that the work associated + with the task has finished. + + The worker must acknowledge a task within the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + or the lease will expire and the task will become available to + be leased again. 
After the task is acknowledged, it will not be + returned by a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.AcknowledgeTaskRequest`): + The request object. Request message for acknowledging a + task using + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.AcknowledgeTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.acknowledge_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def renew_lease(self, + request: cloudtasks.RenewLeaseRequest = None, + *, + name: str = None, + schedule_time: timestamp_pb2.Timestamp = None, + lease_duration: duration_pb2.Duration = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be returned + in the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.RenewLeaseRequest`): + The request object. Request message for renewing a lease + using + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. + name (:class:`str`): + Required. The task name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lease_duration (:class:`google.protobuf.duration_pb2.Duration`): + Required. The desired new lease duration, starting from + now. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + + This corresponds to the ``lease_duration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, schedule_time, lease_duration]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.RenewLeaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + if lease_duration is not None: + request.lease_duration = lease_duration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.renew_lease, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_lease(self, + request: cloudtasks.CancelLeaseRequest = None, + *, + name: str = None, + schedule_time: timestamp_pb2.Timestamp = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + to now. This will make the task available to be leased to the + next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.CancelLeaseRequest`): + The request object. 
Request message for canceling a + lease using + [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.CancelLeaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_lease, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def run_task(self, + request: cloudtasks.RunTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the + [status][google.cloud.tasks.v2beta2.Task.status] after the task + is dispatched but before the task is received by its target. 
+ + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot + be called on a [pull + task][google.cloud.tasks.v2beta2.PullMessage]. + + Args: + request (:class:`google.cloud.tasks_v2beta2.types.RunTaskRequest`): + The request object. Request message for forcing a task + to run now using + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-tasks", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "CloudTasksAsyncClient", +) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py new file mode 100644 index 00000000..169550c4 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py @@ -0,0 +1,2388 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2beta2.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import target +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import 
CloudTasksGrpcTransport +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +class CloudTasksClientMeta(type): + """Metaclass for the CloudTasks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] + _transport_registry["grpc"] = CloudTasksGrpcTransport + _transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[CloudTasksTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudTasksClient(metaclass=CloudTasksClientMeta): + """Cloud Tasks allows developers to manage the execution of + background work in their applications. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudtasks.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudTasksTransport: + """Returns the transport used by the client instance. + + Returns: + CloudTasksTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def queue_path(project: str,location: str,queue: str,) -> str: + """Returns a fully-qualified queue string.""" + return "projects/{project}/locations/{location}/queues/{queue}".format(project=project, location=location, queue=queue, ) + + @staticmethod + def parse_queue_path(path: str) -> Dict[str,str]: + """Parses a queue path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def task_path(project: str,location: str,queue: str,task: str,) -> str: + """Returns a fully-qualified task string.""" + return "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(project=project, location=location, queue=queue, task=task, ) + + @staticmethod + def parse_task_path(path: str) -> Dict[str,str]: + """Parses a task path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)/tasks/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + 
"""Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, CloudTasksTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud tasks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. 
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudTasksTransport): + # transport is a CloudTasksTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_queues(self, + request: cloudtasks.ListQueuesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesPager: + r"""Lists queues. + Queues are returned in lexicographical order. + + Args: + request (google.cloud.tasks_v2beta2.types.ListQueuesRequest): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.services.cloud_tasks.pagers.ListQueuesPager: + Response message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListQueuesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListQueuesRequest): + request = cloudtasks.ListQueuesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_queues] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQueuesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_queue(self, + request: cloudtasks.GetQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + Args: + request (google.cloud.tasks_v2beta2.types.GetQueueRequest): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. + name (str): + Required. The resource name of the queue. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetQueueRequest): + request = cloudtasks.GetQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_queue(self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (google.cloud.tasks_v2beta2.types.CreateQueueRequest): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. + parent (str): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (google.cloud.tasks_v2beta2.types.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta2.Queue.name] + cannot be the same as an existing queue. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateQueueRequest): + request = cloudtasks.CreateQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_queue(self, + request: cloudtasks.UpdateQueueRequest = None, + *, + queue: gct_queue.Queue = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (google.cloud.tasks_v2beta2.types.UpdateQueueRequest): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue]. + queue (google.cloud.tasks_v2beta2.types.Queue): + Required. The queue to create or update. + + The queue's + [name][google.cloud.tasks.v2beta2.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2beta2.Queue.name] cannot be + changed. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.UpdateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.UpdateQueueRequest): + request = cloudtasks.UpdateQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("queue.name", request.queue.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_queue(self, + request: cloudtasks.DeleteQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (google.cloud.tasks_v2beta2.types.DeleteQueueRequest): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloudtasks.DeleteQueueRequest): + request = cloudtasks.DeleteQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def purge_queue(self, + request: cloudtasks.PurgeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Args: + request (google.cloud.tasks_v2beta2.types.PurgeQueueRequest): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PurgeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PurgeQueueRequest): + request = cloudtasks.PurgeQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def pause_queue(self, + request: cloudtasks.PauseQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. 
+
+        If a queue is paused then the system will stop dispatching tasks
+        until the queue is resumed via
+        [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue].
+        Tasks can still be added when the queue is paused. A queue is
+        paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is
+        [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED].
+
+        Args:
+            request (google.cloud.tasks_v2beta2.types.PauseQueueRequest):
+                The request object. Request message for
+                [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue].
+            name (str):
+                Required. The queue name. For example:
+                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta2.types.Queue:
+                A queue is a container of related
+                tasks. Queues are configured to manage
+                how those tasks are dispatched.
+                Configurable properties include rate
+                limits, retry options, target types, and
+                others.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloudtasks.PauseQueueRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+ if not isinstance(request, cloudtasks.PauseQueueRequest): + request = cloudtasks.PauseQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def resume_queue(self, + request: cloudtasks.ResumeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta2.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Args: + request (google.cloud.tasks_v2beta2.types.ResumeQueueRequest): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + name (str): + Required. The queue name. 
For example:
+                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta2.types.Queue:
+                A queue is a container of related
+                tasks. Queues are configured to manage
+                how those tasks are dispatched.
+                Configurable properties include rate
+                limits, retry options, target types, and
+                others.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloudtasks.ResumeQueueRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloudtasks.ResumeQueueRequest):
+            request = cloudtasks.ResumeQueueRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.resume_queue]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Args: + request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for `GetIamPolicy` + method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). 
+ A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. 
+ request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Args: + request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for `SetIamPolicy` + method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. 
+ + Args: + request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for + `TestIamPermissions` method. + resource (str): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (Sequence[str]): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + if resource is not None: + request.resource = resource + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_tasks(self, + request: cloudtasks.ListTasksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Args: + request (google.cloud.tasks_v2beta2.types.ListTasksRequest): + The request object. Request message for listing tasks + using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.services.cloud_tasks.pagers.ListTasksPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListTasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListTasksRequest): + request = cloudtasks.ListTasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTasksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_task(self, + request: cloudtasks.GetTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + Args: + request (google.cloud.tasks_v2beta2.types.GetTaskRequest): + The request object. Request message for getting a task + using + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetTaskRequest): + request = cloudtasks.GetTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_task(self, + request: cloudtasks.CreateTaskRequest = None, + *, + parent: str = None, + task: gct_task.Task = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the + maximum task size is 100KB. + - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the + maximum task size is 1MB. + + Args: + request (google.cloud.tasks_v2beta2.types.CreateTaskRequest): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (google.cloud.tasks_v2beta2.types.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta2.Task.name]. If a name + is not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2beta2.Task.name]. 
+ + If + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or completed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or completed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + completed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateTaskRequest): + request = cloudtasks.CreateTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_task(self, + request: cloudtasks.DeleteTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has completed + successfully or permanently failed. + + Args: + request (google.cloud.tasks_v2beta2.types.DeleteTaskRequest): + The request object. Request message for deleting a task + using + [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. 
+ name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteTaskRequest): + request = cloudtasks.DeleteTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def lease_tasks(self, + request: cloudtasks.LeaseTasksRequest = None, + *, + parent: str = None, + lease_duration: duration_pb2.Duration = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.LeaseTasksResponse: + r"""Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + after they have performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is + intended to store data that the worker needs to perform the work + associated with the task. To return the payloads in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per queue. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is + returned when this limit is exceeded. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also + returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + + Args: + request (google.cloud.tasks_v2beta2.types.LeaseTasksRequest): + The request object. Request message for leasing tasks + using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + parent (str): + Required. The queue name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lease_duration (google.protobuf.duration_pb2.Duration): + Required. The duration of the lease. + + Each task returned in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] + will have its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + set to the current time plus the ``lease_duration``. The + task is leased until its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]; + thus, the task will not be returned to another + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + call before its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + After the worker has successfully finished the work + associated with the task, the worker must call via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + before the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + Otherwise the task will be returned to a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + call so that another worker can retry it. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + + This corresponds to the ``lease_duration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.LeaseTasksResponse: + Response message for leasing tasks using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, lease_duration]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.LeaseTasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.LeaseTasksRequest): + request = cloudtasks.LeaseTasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if lease_duration is not None: + request.lease_duration = lease_duration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.lease_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def acknowledge_task(self, + request: cloudtasks.AcknowledgeTaskRequest = None, + *, + name: str = None, + schedule_time: timestamp_pb2.Timestamp = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Acknowledges a pull task. + + The worker, that is, the entity that + [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this + task must call this method to indicate that the work associated + with the task has finished. 
+ + The worker must acknowledge a task within the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + or the lease will expire and the task will become available to + be leased again. After the task is acknowledged, it will not be + returned by a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Args: + request (google.cloud.tasks_v2beta2.types.AcknowledgeTaskRequest): + The request object. Request message for acknowledging a + task using + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.AcknowledgeTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.AcknowledgeTaskRequest): + request = cloudtasks.AcknowledgeTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.acknowledge_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def renew_lease(self, + request: cloudtasks.RenewLeaseRequest = None, + *, + name: str = None, + schedule_time: timestamp_pb2.Timestamp = None, + lease_duration: duration_pb2.Duration = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be returned + in the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + Args: + request (google.cloud.tasks_v2beta2.types.RenewLeaseRequest): + The request object. 
Request message for renewing a lease + using + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lease_duration (google.protobuf.duration_pb2.Duration): + Required. The desired new lease duration, starting from + now. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + + This corresponds to the ``lease_duration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, schedule_time, lease_duration]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.RenewLeaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.RenewLeaseRequest): + request = cloudtasks.RenewLeaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + if lease_duration is not None: + request.lease_duration = lease_duration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.renew_lease] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_lease(self, + request: cloudtasks.CancelLeaseRequest = None, + *, + name: str = None, + schedule_time: timestamp_pb2.Timestamp = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + to now. This will make the task available to be leased to the + next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. 
+ + Args: + request (google.cloud.tasks_v2beta2.types.CancelLeaseRequest): + The request object. Request message for canceling a + lease using + [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CancelLeaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloudtasks.CancelLeaseRequest): + request = cloudtasks.CancelLeaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_lease] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_task(self, + request: cloudtasks.RunTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the + [status][google.cloud.tasks.v2beta2.Task.status] after the task + is dispatched but before the task is received by its target. 
+ + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot + be called on a [pull + task][google.cloud.tasks.v2beta2.PullMessage]. + + Args: + request (google.cloud.tasks_v2beta2.types.RunTaskRequest): + The request object. Request message for forcing a task + to run now using + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.RunTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.RunTaskRequest): + request = cloudtasks.RunTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-tasks", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "CloudTasksClient", +) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py new file mode 100644 index 00000000..e2e75405 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py @@ -0,0 +1,264 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import task + + +class ListQueuesPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta2.types.ListQueuesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``queues`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta2.types.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., cloudtasks.ListQueuesResponse], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta2.types.ListQueuesRequest): + The initial request object. + response (google.cloud.tasks_v2beta2.types.ListQueuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[queue.Queue]: + for page in self.pages: + yield from page.queues + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListQueuesAsyncPager: + """A pager for iterating through ``list_queues`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta2.types.ListQueuesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta2.types.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cloudtasks.ListQueuesResponse]], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta2.types.ListQueuesRequest): + The initial request object. + response (google.cloud.tasks_v2beta2.types.ListQueuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[queue.Queue]: + async def async_generator(): + async for page in self.pages: + for response in page.queues: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTasksPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta2.types.ListTasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta2.types.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., cloudtasks.ListTasksResponse], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta2.types.ListTasksRequest): + The initial request object. 
+ response (google.cloud.tasks_v2beta2.types.ListTasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[task.Task]: + for page in self.pages: + yield from page.tasks + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTasksAsyncPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta2.types.ListTasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta2.types.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cloudtasks.ListTasksResponse]], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.tasks_v2beta2.types.ListTasksRequest): + The initial request object. + response (google.cloud.tasks_v2beta2.types.ListTasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[task.Task]: + async def async_generator(): + async for page in self.pages: + for response in page.tasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py new file mode 100644 index 00000000..3db96829 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudTasksTransport +from .grpc import CloudTasksGrpcTransport +from .grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] +_transport_registry['grpc'] = CloudTasksGrpcTransport +_transport_registry['grpc_asyncio'] = CloudTasksGrpcAsyncIOTransport + +__all__ = ( + 'CloudTasksTransport', + 'CloudTasksGrpcTransport', + 'CloudTasksGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py new file mode 100644 index 00000000..1e023912 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py @@ -0,0 +1,497 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-tasks', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class CloudTasksTransport(abc.ABC): + """Abstract transport class for CloudTasks.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'cloudtasks.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + 
quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.list_queues: gapic_v1.method.wrap_method( + self.list_queues, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_queue: gapic_v1.method.wrap_method( + self.get_queue, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_queue: gapic_v1.method.wrap_method( + self.create_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.update_queue: gapic_v1.method.wrap_method( + self.update_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.delete_queue: gapic_v1.method.wrap_method( + self.delete_queue, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.purge_queue: gapic_v1.method.wrap_method( + self.purge_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.pause_queue: gapic_v1.method.wrap_method( + self.pause_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.resume_queue: gapic_v1.method.wrap_method( + self.resume_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + 
client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=20.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_tasks: gapic_v1.method.wrap_method( + self.list_tasks, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_task: gapic_v1.method.wrap_method( + self.get_task, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_task: gapic_v1.method.wrap_method( + self.create_task, + default_timeout=20.0, + client_info=client_info, + ), + self.delete_task: gapic_v1.method.wrap_method( + self.delete_task, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.lease_tasks: gapic_v1.method.wrap_method( + self.lease_tasks, + default_timeout=20.0, + client_info=client_info, + ), + self.acknowledge_task: gapic_v1.method.wrap_method( + self.acknowledge_task, + default_timeout=20.0, + client_info=client_info, + ), + self.renew_lease: gapic_v1.method.wrap_method( + self.renew_lease, + default_timeout=20.0, + 
client_info=client_info, + ), + self.cancel_lease: gapic_v1.method.wrap_method( + self.cancel_lease, + default_timeout=20.0, + client_info=client_info, + ), + self.run_task: gapic_v1.method.wrap_method( + self.run_task, + default_timeout=20.0, + client_info=client_info, + ), + } + + @property + def list_queues(self) -> Callable[ + [cloudtasks.ListQueuesRequest], + Union[ + cloudtasks.ListQueuesResponse, + Awaitable[cloudtasks.ListQueuesResponse] + ]]: + raise NotImplementedError() + + @property + def get_queue(self) -> Callable[ + [cloudtasks.GetQueueRequest], + Union[ + queue.Queue, + Awaitable[queue.Queue] + ]]: + raise NotImplementedError() + + @property + def create_queue(self) -> Callable[ + [cloudtasks.CreateQueueRequest], + Union[ + gct_queue.Queue, + Awaitable[gct_queue.Queue] + ]]: + raise NotImplementedError() + + @property + def update_queue(self) -> Callable[ + [cloudtasks.UpdateQueueRequest], + Union[ + gct_queue.Queue, + Awaitable[gct_queue.Queue] + ]]: + raise NotImplementedError() + + @property + def delete_queue(self) -> Callable[ + [cloudtasks.DeleteQueueRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def purge_queue(self) -> Callable[ + [cloudtasks.PurgeQueueRequest], + Union[ + queue.Queue, + Awaitable[queue.Queue] + ]]: + raise NotImplementedError() + + @property + def pause_queue(self) -> Callable[ + [cloudtasks.PauseQueueRequest], + Union[ + queue.Queue, + Awaitable[queue.Queue] + ]]: + raise NotImplementedError() + + @property + def resume_queue(self) -> Callable[ + [cloudtasks.ResumeQueueRequest], + Union[ + queue.Queue, + Awaitable[queue.Queue] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + 
policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def list_tasks(self) -> Callable[ + [cloudtasks.ListTasksRequest], + Union[ + cloudtasks.ListTasksResponse, + Awaitable[cloudtasks.ListTasksResponse] + ]]: + raise NotImplementedError() + + @property + def get_task(self) -> Callable[ + [cloudtasks.GetTaskRequest], + Union[ + task.Task, + Awaitable[task.Task] + ]]: + raise NotImplementedError() + + @property + def create_task(self) -> Callable[ + [cloudtasks.CreateTaskRequest], + Union[ + gct_task.Task, + Awaitable[gct_task.Task] + ]]: + raise NotImplementedError() + + @property + def delete_task(self) -> Callable[ + [cloudtasks.DeleteTaskRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def lease_tasks(self) -> Callable[ + [cloudtasks.LeaseTasksRequest], + Union[ + cloudtasks.LeaseTasksResponse, + Awaitable[cloudtasks.LeaseTasksResponse] + ]]: + raise NotImplementedError() + + @property + def acknowledge_task(self) -> Callable[ + [cloudtasks.AcknowledgeTaskRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def renew_lease(self) -> Callable[ + [cloudtasks.RenewLeaseRequest], + Union[ + task.Task, + Awaitable[task.Task] + ]]: + raise NotImplementedError() + + @property + def cancel_lease(self) -> Callable[ + [cloudtasks.CancelLeaseRequest], + Union[ + task.Task, + Awaitable[task.Task] + ]]: + raise NotImplementedError() + + @property + def run_task(self) -> Callable[ + [cloudtasks.RunTaskRequest], + Union[ + task.Task, + Awaitable[task.Task] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'CloudTasksTransport', +) diff --git 
a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py new file mode 100644 index 00000000..252400e8 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py @@ -0,0 +1,942 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO + + +class CloudTasksGrpcTransport(CloudTasksTransport): + """gRPC backend transport for CloudTasks. 
+
+    Cloud Tasks allows developers to manage the execution of
+    background work in their applications.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'cloudtasks.googleapis.com',
+            credentials: ga_credentials.Credentials = None,
+            credentials_file: str = None,
+            scopes: Sequence[str] = None,
+            channel: grpc.Channel = None,
+            api_mtls_endpoint: str = None,
+            client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+            ssl_channel_credentials: grpc.ChannelCredentials = None,
+            client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'cloudtasks.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def list_queues(self) -> Callable[
+            [cloudtasks.ListQueuesRequest],
+            cloudtasks.ListQueuesResponse]:
+        r"""Return a callable for the list queues method over gRPC.
+
+        Lists queues.
+        Queues are returned in lexicographical order.
+
+        Returns:
+            Callable[[~.ListQueuesRequest],
+                    ~.ListQueuesResponse]:
+            A function that, when called, will call the underlying RPC
+            on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if 'list_queues' not in self._stubs: + self._stubs['list_queues'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/ListQueues', + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs['list_queues'] + + @property + def get_queue(self) -> Callable[ + [cloudtasks.GetQueueRequest], + queue.Queue]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_queue' not in self._stubs: + self._stubs['get_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/GetQueue', + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['get_queue'] + + @property + def create_queue(self) -> Callable[ + [cloudtasks.CreateQueueRequest], + gct_queue.Queue]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_queue' not in self._stubs: + self._stubs['create_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue', + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs['create_queue'] + + @property + def update_queue(self) -> Callable[ + [cloudtasks.UpdateQueueRequest], + gct_queue.Queue]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_queue' not in self._stubs: + self._stubs['update_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue', + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs['update_queue'] + + @property + def delete_queue(self) -> Callable[ + [cloudtasks.DeleteQueueRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. 
+ + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_queue' not in self._stubs: + self._stubs['delete_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue', + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_queue'] + + @property + def purge_queue(self) -> Callable[ + [cloudtasks.PurgeQueueRequest], + queue.Queue]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'purge_queue' not in self._stubs: + self._stubs['purge_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue', + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['purge_queue'] + + @property + def pause_queue(self) -> Callable[ + [cloudtasks.PauseQueueRequest], + queue.Queue]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'pause_queue' not in self._stubs: + self._stubs['pause_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue', + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['pause_queue'] + + @property + def resume_queue(self) -> Callable[ + [cloudtasks.ResumeQueueRequest], + queue.Queue]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. 
The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta2.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'resume_queue' not in self._stubs: + self._stubs['resume_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue', + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['resume_queue'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta2.Queue]. 
If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def list_tasks(self) -> Callable[ + [cloudtasks.ListTasksRequest], + cloudtasks.ListTasksResponse]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + ~.ListTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_tasks' not in self._stubs: + self._stubs['list_tasks'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/ListTasks', + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs['list_tasks'] + + @property + def get_task(self) -> Callable[ + [cloudtasks.GetTaskRequest], + task.Task]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_task' not in self._stubs: + self._stubs['get_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/GetTask', + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['get_task'] + + @property + def create_task(self) -> Callable[ + [cloudtasks.CreateTaskRequest], + gct_task.Task]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the + maximum task size is 100KB. + - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the + maximum task size is 1MB. + + Returns: + Callable[[~.CreateTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_task' not in self._stubs: + self._stubs['create_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/CreateTask', + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs['create_task'] + + @property + def delete_task(self) -> Callable[ + [cloudtasks.DeleteTaskRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has completed + successfully or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_task' not in self._stubs: + self._stubs['delete_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask', + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_task'] + + @property + def lease_tasks(self) -> Callable[ + [cloudtasks.LeaseTasksRequest], + cloudtasks.LeaseTasksResponse]: + r"""Return a callable for the lease tasks method over gRPC. + + Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + after they have performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is + intended to store data that the worker needs to perform the work + associated with the task. 
To return the payloads in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per queue. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is + returned when this limit is exceeded. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also + returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + + Returns: + Callable[[~.LeaseTasksRequest], + ~.LeaseTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'lease_tasks' not in self._stubs: + self._stubs['lease_tasks'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks', + request_serializer=cloudtasks.LeaseTasksRequest.serialize, + response_deserializer=cloudtasks.LeaseTasksResponse.deserialize, + ) + return self._stubs['lease_tasks'] + + @property + def acknowledge_task(self) -> Callable[ + [cloudtasks.AcknowledgeTaskRequest], + empty_pb2.Empty]: + r"""Return a callable for the acknowledge task method over gRPC. + + Acknowledges a pull task. + + The worker, that is, the entity that + [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this + task must call this method to indicate that the work associated + with the task has finished. + + The worker must acknowledge a task within the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + or the lease will expire and the task will become available to + be leased again. 
After the task is acknowledged, it will not be + returned by a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Returns: + Callable[[~.AcknowledgeTaskRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'acknowledge_task' not in self._stubs: + self._stubs['acknowledge_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask', + request_serializer=cloudtasks.AcknowledgeTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['acknowledge_task'] + + @property + def renew_lease(self) -> Callable[ + [cloudtasks.RenewLeaseRequest], + task.Task]: + r"""Return a callable for the renew lease method over gRPC. + + Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be returned + in the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + Returns: + Callable[[~.RenewLeaseRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'renew_lease' not in self._stubs: + self._stubs['renew_lease'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/RenewLease', + request_serializer=cloudtasks.RenewLeaseRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['renew_lease'] + + @property + def cancel_lease(self) -> Callable[ + [cloudtasks.CancelLeaseRequest], + task.Task]: + r"""Return a callable for the cancel lease method over gRPC. + + Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + to now. This will make the task available to be leased to the + next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + Returns: + Callable[[~.CancelLeaseRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_lease' not in self._stubs: + self._stubs['cancel_lease'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/CancelLease', + request_serializer=cloudtasks.CancelLeaseRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['cancel_lease'] + + @property + def run_task(self) -> Callable[ + [cloudtasks.RunTaskRequest], + task.Task]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. 
For + example, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the + [status][google.cloud.tasks.v2beta2.Task.status] after the task + is dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot + be called on a [pull + task][google.cloud.tasks.v2beta2.PullMessage]. + + Returns: + Callable[[~.RunTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'run_task' not in self._stubs: + self._stubs['run_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/RunTask', + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['run_task'] + + +__all__ = ( + 'CloudTasksGrpcTransport', +) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py new file mode 100644 index 00000000..b427ac44 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py @@ -0,0 +1,946 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .grpc import CloudTasksGrpcTransport + + +class CloudTasksGrpcAsyncIOTransport(CloudTasksTransport): + """gRPC AsyncIO backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'cloudtasks.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'cloudtasks.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), 
+ ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_queues(self) -> Callable[ + [cloudtasks.ListQueuesRequest], + Awaitable[cloudtasks.ListQueuesResponse]]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + Awaitable[~.ListQueuesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_queues' not in self._stubs: + self._stubs['list_queues'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/ListQueues', + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs['list_queues'] + + @property + def get_queue(self) -> Callable[ + [cloudtasks.GetQueueRequest], + Awaitable[queue.Queue]]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_queue' not in self._stubs: + self._stubs['get_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/GetQueue', + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['get_queue'] + + @property + def create_queue(self) -> Callable[ + [cloudtasks.CreateQueueRequest], + Awaitable[gct_queue.Queue]]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_queue' not in self._stubs: + self._stubs['create_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue', + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs['create_queue'] + + @property + def update_queue(self) -> Callable[ + [cloudtasks.UpdateQueueRequest], + Awaitable[gct_queue.Queue]]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. 
After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_queue' not in self._stubs: + self._stubs['update_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue', + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs['update_queue'] + + @property + def delete_queue(self) -> Callable[ + [cloudtasks.DeleteQueueRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_queue' not in self._stubs: + self._stubs['delete_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue', + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_queue'] + + @property + def purge_queue(self) -> Callable[ + [cloudtasks.PurgeQueueRequest], + Awaitable[queue.Queue]]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'purge_queue' not in self._stubs: + self._stubs['purge_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue', + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['purge_queue'] + + @property + def pause_queue(self) -> Callable[ + [cloudtasks.PauseQueueRequest], + Awaitable[queue.Queue]]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. 
+ + Returns: + Callable[[~.PauseQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'pause_queue' not in self._stubs: + self._stubs['pause_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue', + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['pause_queue'] + + @property + def resume_queue(self) -> Callable[ + [cloudtasks.ResumeQueueRequest], + Awaitable[queue.Queue]]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta2.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'resume_queue' not in self._stubs: + self._stubs['resume_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue', + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['resume_queue'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. 
+ + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def list_tasks(self) -> Callable[ + [cloudtasks.ListTasksRequest], + Awaitable[cloudtasks.ListTasksResponse]]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + Awaitable[~.ListTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_tasks' not in self._stubs: + self._stubs['list_tasks'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/ListTasks', + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs['list_tasks'] + + @property + def get_task(self) -> Callable[ + [cloudtasks.GetTaskRequest], + Awaitable[task.Task]]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_task' not in self._stubs: + self._stubs['get_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/GetTask', + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['get_task'] + + @property + def create_task(self) -> Callable[ + [cloudtasks.CreateTaskRequest], + Awaitable[gct_task.Task]]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the + maximum task size is 100KB. + - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the + maximum task size is 1MB. + + Returns: + Callable[[~.CreateTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_task' not in self._stubs: + self._stubs['create_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/CreateTask', + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs['create_task'] + + @property + def delete_task(self) -> Callable[ + [cloudtasks.DeleteTaskRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has completed + successfully or permanently failed. 
+ + Returns: + Callable[[~.DeleteTaskRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_task' not in self._stubs: + self._stubs['delete_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask', + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_task'] + + @property + def lease_tasks(self) -> Callable[ + [cloudtasks.LeaseTasksRequest], + Awaitable[cloudtasks.LeaseTasksResponse]]: + r"""Return a callable for the lease tasks method over gRPC. + + Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + after they have performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is + intended to store data that the worker needs to perform the work + associated with the task. To return the payloads in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per queue. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is + returned when this limit is exceeded. 
+ [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also + returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + + Returns: + Callable[[~.LeaseTasksRequest], + Awaitable[~.LeaseTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'lease_tasks' not in self._stubs: + self._stubs['lease_tasks'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks', + request_serializer=cloudtasks.LeaseTasksRequest.serialize, + response_deserializer=cloudtasks.LeaseTasksResponse.deserialize, + ) + return self._stubs['lease_tasks'] + + @property + def acknowledge_task(self) -> Callable[ + [cloudtasks.AcknowledgeTaskRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the acknowledge task method over gRPC. + + Acknowledges a pull task. + + The worker, that is, the entity that + [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this + task must call this method to indicate that the work associated + with the task has finished. + + The worker must acknowledge a task within the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + or the lease will expire and the task will become available to + be leased again. After the task is acknowledged, it will not be + returned by a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Returns: + Callable[[~.AcknowledgeTaskRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'acknowledge_task' not in self._stubs: + self._stubs['acknowledge_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask', + request_serializer=cloudtasks.AcknowledgeTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['acknowledge_task'] + + @property + def renew_lease(self) -> Callable[ + [cloudtasks.RenewLeaseRequest], + Awaitable[task.Task]]: + r"""Return a callable for the renew lease method over gRPC. + + Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be returned + in the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + Returns: + Callable[[~.RenewLeaseRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'renew_lease' not in self._stubs: + self._stubs['renew_lease'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/RenewLease', + request_serializer=cloudtasks.RenewLeaseRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['renew_lease'] + + @property + def cancel_lease(self) -> Callable[ + [cloudtasks.CancelLeaseRequest], + Awaitable[task.Task]]: + r"""Return a callable for the cancel lease method over gRPC. + + Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + to now. 
This will make the task available to be leased to the + next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + Returns: + Callable[[~.CancelLeaseRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_lease' not in self._stubs: + self._stubs['cancel_lease'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/CancelLease', + request_serializer=cloudtasks.CancelLeaseRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['cancel_lease'] + + @property + def run_task(self) -> Callable[ + [cloudtasks.RunTaskRequest], + Awaitable[task.Task]]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the + [status][google.cloud.tasks.v2beta2.Task.status] after the task + is dispatched but before the task is received by its target. 
+ + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot + be called on a [pull + task][google.cloud.tasks.v2beta2.PullMessage]. + + Returns: + Callable[[~.RunTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_task' not in self._stubs: + self._stubs['run_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/RunTask', + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['run_task'] + + +__all__ = ( + 'CloudTasksGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/__init__.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/__init__.py new file mode 100644 index 00000000..06717c3d --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/__init__.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .cloudtasks import ( + AcknowledgeTaskRequest, + CancelLeaseRequest, + CreateQueueRequest, + CreateTaskRequest, + DeleteQueueRequest, + DeleteTaskRequest, + GetQueueRequest, + GetTaskRequest, + LeaseTasksRequest, + LeaseTasksResponse, + ListQueuesRequest, + ListQueuesResponse, + ListTasksRequest, + ListTasksResponse, + PauseQueueRequest, + PurgeQueueRequest, + RenewLeaseRequest, + ResumeQueueRequest, + RunTaskRequest, + UpdateQueueRequest, +) +from .queue import ( + Queue, + QueueStats, + RateLimits, + RetryConfig, +) +from .target import ( + AppEngineHttpRequest, + AppEngineHttpTarget, + AppEngineRouting, + PullMessage, + PullTarget, + HttpMethod, +) +from .task import ( + AttemptStatus, + Task, + TaskStatus, +) + +__all__ = ( + 'AcknowledgeTaskRequest', + 'CancelLeaseRequest', + 'CreateQueueRequest', + 'CreateTaskRequest', + 'DeleteQueueRequest', + 'DeleteTaskRequest', + 'GetQueueRequest', + 'GetTaskRequest', + 'LeaseTasksRequest', + 'LeaseTasksResponse', + 'ListQueuesRequest', + 'ListQueuesResponse', + 'ListTasksRequest', + 'ListTasksResponse', + 'PauseQueueRequest', + 'PurgeQueueRequest', + 'RenewLeaseRequest', + 'ResumeQueueRequest', + 'RunTaskRequest', + 'UpdateQueueRequest', + 'Queue', + 'QueueStats', + 'RateLimits', + 'RetryConfig', + 'AppEngineHttpRequest', + 'AppEngineHttpTarget', + 'AppEngineRouting', + 'PullMessage', + 'PullTarget', + 'HttpMethod', + 'AttemptStatus', + 'Task', + 'TaskStatus', +) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/cloudtasks.py 
b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/cloudtasks.py new file mode 100644 index 00000000..7090aa05 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/cloudtasks.py @@ -0,0 +1,869 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.tasks.v2beta2', + manifest={ + 'ListQueuesRequest', + 'ListQueuesResponse', + 'GetQueueRequest', + 'CreateQueueRequest', + 'UpdateQueueRequest', + 'DeleteQueueRequest', + 'PurgeQueueRequest', + 'PauseQueueRequest', + 'ResumeQueueRequest', + 'ListTasksRequest', + 'ListTasksResponse', + 'GetTaskRequest', + 'CreateTaskRequest', + 'DeleteTaskRequest', + 'LeaseTasksRequest', + 'LeaseTasksResponse', + 'AcknowledgeTaskRequest', + 'RenewLeaseRequest', + 'CancelLeaseRequest', + 'RunTaskRequest', + }, +) + + +class ListQueuesRequest(proto.Message): + r"""Request message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + + Attributes: + parent (str): + Required. The location name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + filter (str): + ``filter`` can be used to specify a subset of queues. Any + [Queue][google.cloud.tasks.v2beta2.Queue] field can be used + as a filter and several operators as supported. For example: + ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as + described in `Stackdriver's Advanced Logs + Filters `__. + + Sample filter "app_engine_http_target: \*". + + Note that using filters might cause fewer queues than the + requested_page size to be returned. + page_size (int): + Requested page size. + + The maximum page size is 9800. If unspecified, the page size + will be the maximum. Fewer queues than requested might be + returned, even if more queues exist; use the + [next_page_token][google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token] + in the response to determine if more queues exist. + page_token (str): + A token identifying the page of results to return. + + To request the first page results, page_token must be empty. + To request the next page of results, page_token must be the + value of + [next_page_token][google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token] + returned from the previous call to + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues] + method. It is an error to switch the value of the + [filter][google.cloud.tasks.v2beta2.ListQueuesRequest.filter] + while iterating through pages. + read_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Read mask is used for a more granular control over + what the API returns. If the mask is not present all fields + will be returned except [Queue.stats]. [Queue.stats] will be + returned only if it was explicitly specified in the mask. 
+ """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + read_mask = proto.Field( + proto.MESSAGE, + number=5, + message=field_mask_pb2.FieldMask, + ) + + +class ListQueuesResponse(proto.Message): + r"""Response message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + + Attributes: + queues (Sequence[google.cloud.tasks_v2beta2.types.Queue]): + The list of queues. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues] + with this value as the + [page_token][google.cloud.tasks.v2beta2.ListQueuesRequest.page_token]. + + If the next_page_token is empty, there are no more results. + + The page token is valid for only 2 hours. + """ + + @property + def raw_page(self): + return self + + queues = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gct_queue.Queue, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetQueueRequest(proto.Message): + r"""Request message for + [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. + + Attributes: + name (str): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + read_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Read mask is used for a more granular control over + what the API returns. If the mask is not present all fields + will be returned except [Queue.stats]. [Queue.stats] will be + returned only if it was explicitly specified in the mask. 
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + read_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class CreateQueueRequest(proto.Message): + r"""Request message for + [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. + + Attributes: + parent (str): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + queue (google.cloud.tasks_v2beta2.types.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta2.Queue.name] cannot + be the same as an existing queue. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + queue = proto.Field( + proto.MESSAGE, + number=2, + message=gct_queue.Queue, + ) + + +class UpdateQueueRequest(proto.Message): + r"""Request message for + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue]. + + Attributes: + queue (google.cloud.tasks_v2beta2.types.Queue): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2beta2.Queue.name] + must be specified. + + Output only fields cannot be modified using UpdateQueue. Any + value specified for an output only field will be ignored. + The queue's [name][google.cloud.tasks.v2beta2.Queue.name] + cannot be changed. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields of the + queue are being updated. + If empty, then all fields will be updated. + """ + + queue = proto.Field( + proto.MESSAGE, + number=1, + message=gct_queue.Queue, + ) + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteQueueRequest(proto.Message): + r"""Request message for + [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. 
+ + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class PurgeQueueRequest(proto.Message): + r"""Request message for + [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class PauseQueueRequest(proto.Message): + r"""Request message for + [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ResumeQueueRequest(proto.Message): + r"""Request message for + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTasksRequest(proto.Message): + r"""Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + response_view (google.cloud.tasks_v2beta2.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. 
class ListTasksResponse(proto.Message):
    r"""Response message for listing tasks using
    [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks].

    Attributes:
        tasks (Sequence[google.cloud.tasks_v2beta2.types.Task]):
            The list of tasks.
        next_page_token (str):
            Token to pass as ``page_token`` in a subsequent
            [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]
            call to retrieve the next page. Empty when there are no
            more results. The token is valid for only 2 hours.
    """

    @property
    def raw_page(self):
        # Each response acts as its own "raw page" for the pager helpers.
        return self

    tasks = proto.RepeatedField(proto.MESSAGE, number=1, message=gct_task.Task)
    next_page_token = proto.Field(proto.STRING, number=2)


class GetTaskRequest(proto.Message):
    r"""Request message for getting a task using
    [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask].

    Attributes:
        name (str):
            Required. The task name. For example:
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
        response_view (google.cloud.tasks_v2beta2.types.Task.View):
            Which subset of the [Task][google.cloud.tasks.v2beta2.Task]
            to return; defaults to BASIC. FULL requires the
            ``cloudtasks.tasks.fullView`` Google IAM permission on the
            Task resource.
    """

    name = proto.Field(proto.STRING, number=1)
    response_view = proto.Field(proto.ENUM, number=2, enum=gct_task.Task.View)


class CreateTaskRequest(proto.Message):
    r"""Request message for
    [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask].

    Attributes:
        parent (str):
            Required. The queue name. For example:
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
            The queue must already exist.
        task (google.cloud.tasks_v2beta2.types.Task):
            Required. The task to add. Explicitly setting the task
            [name][google.cloud.tasks.v2beta2.Task.name] enables task
            de-duplication (at significantly increased call latency);
            if the name is unset the system generates a random unique
            task id. An unset or past
            [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]
            is replaced with the current time.
        response_view (google.cloud.tasks_v2beta2.types.Task.View):
            Which subset of the [Task][google.cloud.tasks.v2beta2.Task]
            to return; defaults to BASIC. FULL requires the
            ``cloudtasks.tasks.fullView`` Google IAM permission on the
            Task resource.
    """

    parent = proto.Field(proto.STRING, number=1)
    task = proto.Field(proto.MESSAGE, number=2, message=gct_task.Task)
    response_view = proto.Field(proto.ENUM, number=3, enum=gct_task.Task.View)


class DeleteTaskRequest(proto.Message):
    r"""Request message for deleting a task using
    [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask].

    Attributes:
        name (str):
            Required. The task name. For example:
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
    """

    name = proto.Field(proto.STRING, number=1)


class LeaseTasksRequest(proto.Message):
    r"""Request message for leasing tasks using
    [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks].

    Attributes:
        parent (str):
            Required. The queue name. For example:
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
        max_tasks (int):
            The maximum number of tasks to lease (best effort; the
            largest allowed value is 1000). The 32 MB response size
            limit may further reduce the number returned.
        lease_duration (google.protobuf.duration_pb2.Duration):
            Required. The lease duration; each returned task's
            [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]
            is set to now plus this duration, and the task is not
            returned to another LeaseTasks call until then. Maximum 1
            week; truncated to the nearest second.
        response_view (google.cloud.tasks_v2beta2.types.Task.View):
            Which subset of the [Task][google.cloud.tasks.v2beta2.Task]
            to return; defaults to BASIC. FULL requires the
            ``cloudtasks.tasks.fullView`` Google IAM permission on the
            Task resource.
        filter (str):
            Restricts which tasks are leased. Grammar:
            ``filter = "tag=" tag | "tag_function=" function`` where
            ``function = "oldest_tag()"``. ``oldest_tag()`` selects
            tasks sharing the tag of the oldest task (by schedule
            time). Only UTF-8 encoded tags can be used here; tags must
            be less than 500 characters.
    """

    parent = proto.Field(proto.STRING, number=1)
    max_tasks = proto.Field(proto.INT32, number=2)
    lease_duration = proto.Field(proto.MESSAGE, number=3, message=duration_pb2.Duration)
    response_view = proto.Field(proto.ENUM, number=4, enum=gct_task.Task.View)
    filter = proto.Field(proto.STRING, number=5)


class LeaseTasksResponse(proto.Message):
    r"""Response message for leasing tasks using
    [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks].

    Attributes:
        tasks (Sequence[google.cloud.tasks_v2beta2.types.Task]):
            The leased tasks.
    """

    tasks = proto.RepeatedField(proto.MESSAGE, number=1, message=gct_task.Task)


class AcknowledgeTaskRequest(proto.Message):
    r"""Request message for acknowledging a task using
    [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask].

    Attributes:
        name (str):
            Required. The task name. For example:
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
        schedule_time (google.protobuf.timestamp_pb2.Timestamp):
            Required. The task's current schedule time, as returned by
            the LeaseTasks or RenewLease response. This restriction
            ensures the worker currently holds the lease.
    """

    name = proto.Field(proto.STRING, number=1)
    schedule_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp)


class RenewLeaseRequest(proto.Message):
    r"""Request message for renewing a lease using
    [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease].

    Attributes:
        name (str):
            Required. The task name. For example:
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
        schedule_time (google.protobuf.timestamp_pb2.Timestamp):
            Required. The task's current schedule time, as returned by
            the LeaseTasks or RenewLease response. This restriction
            ensures the worker currently holds the lease.
        lease_duration (google.protobuf.duration_pb2.Duration):
            Required. The desired new lease duration, starting from
            now. Maximum 1 week; truncated to the nearest second.
        response_view (google.cloud.tasks_v2beta2.types.Task.View):
            Which subset of the [Task][google.cloud.tasks.v2beta2.Task]
            to return; defaults to BASIC. FULL requires the
            ``cloudtasks.tasks.fullView`` Google IAM permission on the
            Task resource.
    """

    name = proto.Field(proto.STRING, number=1)
    schedule_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp)
    lease_duration = proto.Field(proto.MESSAGE, number=3, message=duration_pb2.Duration)
    response_view = proto.Field(proto.ENUM, number=4, enum=gct_task.Task.View)


class CancelLeaseRequest(proto.Message):
    r"""Request message for canceling a lease using
    [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease].

    Attributes:
        name (str):
            Required. The task name. For example:
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
        schedule_time (google.protobuf.timestamp_pb2.Timestamp):
            Required. The task's current schedule time, as returned by
            the LeaseTasks or RenewLease response. This restriction
            ensures the worker currently holds the lease.
        response_view (google.cloud.tasks_v2beta2.types.Task.View):
            Which subset of the [Task][google.cloud.tasks.v2beta2.Task]
            to return; defaults to BASIC. FULL requires the
            ``cloudtasks.tasks.fullView`` Google IAM permission on the
            Task resource.
    """

    name = proto.Field(proto.STRING, number=1)
    schedule_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp)
    response_view = proto.Field(proto.ENUM, number=3, enum=gct_task.Task.View)
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + response_view = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/queue.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/queue.py new file mode 100644 index 00000000..7473bc18 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/queue.py @@ -0,0 +1,530 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.tasks_v2beta2.types import target +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.tasks.v2beta2', + manifest={ + 'Queue', + 'RateLimits', + 'RetryConfig', + 'QueueStats', + }, +) + + +class Queue(proto.Message): + r"""A queue is a container of related tasks. Queues are + configured to manage how those tasks are dispatched. + Configurable properties include rate limits, retry options, + target types, and others. + + Attributes: + name (str): + Caller-specified and required in + [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue], + after which it becomes output only. + + The queue name. 
+ + The queue name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the queue's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + app_engine_http_target (google.cloud.tasks_v2beta2.types.AppEngineHttpTarget): + App Engine HTTP target. + + An App Engine queue is a queue that has an + [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget]. + pull_target (google.cloud.tasks_v2beta2.types.PullTarget): + Pull target. + + A pull queue is a queue that has a + [PullTarget][google.cloud.tasks.v2beta2.PullTarget]. + rate_limits (google.cloud.tasks_v2beta2.types.RateLimits): + Rate limits for task dispatches. + + [rate_limits][google.cloud.tasks.v2beta2.Queue.rate_limits] + and + [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] + are related because they both control task attempts however + they control how tasks are attempted in different ways: + + - [rate_limits][google.cloud.tasks.v2beta2.Queue.rate_limits] + controls the total rate of dispatches from a queue (i.e. + all traffic dispatched from the queue, regardless of + whether the dispatch is from a first attempt or a retry). + - [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] + controls what happens to particular a task after its + first attempt fails. That is, + [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] + controls task retries (the second attempt, third attempt, + etc). 
+ retry_config (google.cloud.tasks_v2beta2.types.RetryConfig): + Settings that determine the retry behavior. + + - For tasks created using Cloud Tasks: the queue-level + retry settings apply to all tasks in the queue that were + created using Cloud Tasks. Retry settings cannot be set + on individual tasks. + - For tasks created using the App Engine SDK: the + queue-level retry settings apply to all tasks in the + queue which do not have retry settings explicitly set on + the task and were created by the App Engine SDK. See `App + Engine + documentation `__. + state (google.cloud.tasks_v2beta2.types.Queue.State): + Output only. The state of the queue. + + ``state`` can only be changed by calling + [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue], + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue], + or uploading + `queue.yaml/xml `__. + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] + cannot be used to change ``state``. + purge_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last time this queue was purged. + + All tasks that were + [created][google.cloud.tasks.v2beta2.Task.create_time] + before this time were purged. + + A queue can be purged using + [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue], + the `App Engine Task Queue SDK, or the Cloud + Console `__. + + Purge time will be truncated to the nearest microsecond. + Purge time will be unset if the queue has never been purged. + task_ttl (google.protobuf.duration_pb2.Duration): + The maximum amount of time that a task will be retained in + this queue. + + Queues created by Cloud Tasks have a default ``task_ttl`` of + 31 days. After a task has lived for ``task_ttl``, the task + will be deleted regardless of whether it was dispatched or + not. + + The ``task_ttl`` for queues created via queue.yaml/xml is + equal to the maximum duration because there is a `storage + quota `__ + for these queues. 
To view the maximum valid duration, see + the documentation for [Duration][google.protobuf.Duration]. + tombstone_ttl (google.protobuf.duration_pb2.Duration): + The task tombstone time to live (TTL). + + After a task is deleted or completed, the task's tombstone + is retained for the length of time specified by + ``tombstone_ttl``. The tombstone is used by task + de-duplication; another task with the same name can't be + created until the tombstone has expired. For more + information about task de-duplication, see the documentation + for + [CreateTaskRequest][google.cloud.tasks.v2beta2.CreateTaskRequest.task]. + + Queues created by Cloud Tasks have a default + ``tombstone_ttl`` of 1 hour. + stats (google.cloud.tasks_v2beta2.types.QueueStats): + Output only. The realtime, informational + statistics for a queue. In order to receive the + statistics the caller should include this field + in the FieldMask. + """ + class State(proto.Enum): + r"""State of the queue.""" + STATE_UNSPECIFIED = 0 + RUNNING = 1 + PAUSED = 2 + DISABLED = 3 + + name = proto.Field( + proto.STRING, + number=1, + ) + app_engine_http_target = proto.Field( + proto.MESSAGE, + number=3, + oneof='target_type', + message=target.AppEngineHttpTarget, + ) + pull_target = proto.Field( + proto.MESSAGE, + number=4, + oneof='target_type', + message=target.PullTarget, + ) + rate_limits = proto.Field( + proto.MESSAGE, + number=5, + message='RateLimits', + ) + retry_config = proto.Field( + proto.MESSAGE, + number=6, + message='RetryConfig', + ) + state = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + purge_time = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + task_ttl = proto.Field( + proto.MESSAGE, + number=9, + message=duration_pb2.Duration, + ) + tombstone_ttl = proto.Field( + proto.MESSAGE, + number=10, + message=duration_pb2.Duration, + ) + stats = proto.Field( + proto.MESSAGE, + number=16, + message='QueueStats', + ) + + +class RateLimits(proto.Message): + 
r"""Rate limits. + + This message determines the maximum rate that tasks can be + dispatched by a queue, regardless of whether the dispatch is a first + task attempt or a retry. + + Note: The debugging command, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask], will run a + task even if the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits]. + + Attributes: + max_tasks_dispatched_per_second (float): + The maximum rate at which tasks are dispatched from this + queue. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], + the maximum allowed value is 500. + - This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget]. In + addition to the ``max_tasks_dispatched_per_second`` + limit, a maximum of 10 QPS of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per pull queue. + + This field has the same meaning as `rate in + queue.yaml/xml `__. + max_burst_size (int): + The max burst size. + + Max burst size limits how fast tasks in queue are processed + when many tasks are in the queue and the rate is high. This + field allows the queue to have a high rate so processing + starts shortly after a task is enqueued, but still limits + resource usage when many tasks are enqueued in a short + period of time. + + The `token + bucket `__ + algorithm is used to control the rate of task dispatches. + Each queue has a token bucket that holds tokens, up to the + maximum specified by ``max_burst_size``. Each time a task is + dispatched, a token is removed from the bucket. Tasks will + be dispatched until the queue's bucket runs out of tokens. + The bucket will be continuously refilled with new tokens + based on + [max_dispatches_per_second][RateLimits.max_dispatches_per_second]. 
+ + The default value of ``max_burst_size`` is picked by Cloud + Tasks based on the value of + [max_dispatches_per_second][RateLimits.max_dispatches_per_second]. + + The maximum value of ``max_burst_size`` is 500. + + For App Engine queues that were created or updated using + ``queue.yaml/xml``, ``max_burst_size`` is equal to + `bucket_size `__. + If + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] + is called on a queue without explicitly setting a value for + ``max_burst_size``, ``max_burst_size`` value will get + updated if + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] + is updating + [max_dispatches_per_second][RateLimits.max_dispatches_per_second]. + max_concurrent_tasks (int): + The maximum number of concurrent tasks that Cloud Tasks + allows to be dispatched for this queue. After this threshold + has been reached, Cloud Tasks stops dispatching tasks until + the number of concurrent requests decreases. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + The maximum allowed value is 5,000. + + This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget] and always + -1, which indicates no limit. No other queue types can have + ``max_concurrent_tasks`` set to -1. + + This field has the same meaning as `max_concurrent_requests + in + queue.yaml/xml `__. + """ + + max_tasks_dispatched_per_second = proto.Field( + proto.DOUBLE, + number=1, + ) + max_burst_size = proto.Field( + proto.INT32, + number=2, + ) + max_concurrent_tasks = proto.Field( + proto.INT32, + number=3, + ) + + +class RetryConfig(proto.Message): + r"""Retry config. + These settings determine how a failed task attempt is retried. + + Attributes: + max_attempts (int): + The maximum number of attempts for a task. + + Cloud Tasks will attempt the task ``max_attempts`` times + (that is, if the first attempt fails, then there will be + ``max_attempts - 1`` retries). Must be > 0. 
+ unlimited_attempts (bool): + If true, then the number of attempts is + unlimited. + max_retry_duration (google.protobuf.duration_pb2.Duration): + If positive, ``max_retry_duration`` specifies the time limit + for retrying a failed task, measured from when the task was + first attempted. Once ``max_retry_duration`` time has passed + *and* the task has been attempted + [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts] + times, no further attempts will be made and the task will be + deleted. + + If zero, then the task age is unlimited. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget]. + + ``max_retry_duration`` will be truncated to the nearest + second. + + This field has the same meaning as `task_age_limit in + queue.yaml/xml `__. + min_backoff (google.protobuf.duration_pb2.Duration): + A task will be + [scheduled][google.cloud.tasks.v2beta2.Task.schedule_time] + for retry between + [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig] + specifies that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget]. + + ``min_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `min_backoff_seconds in + queue.yaml/xml `__. 
+ max_backoff (google.protobuf.duration_pb2.Duration): + A task will be + [scheduled][google.cloud.tasks.v2beta2.Task.schedule_time] + for retry between + [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig] + specifies that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget]. + + ``max_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `max_backoff_seconds in + queue.yaml/xml `__. + max_doublings (int): + The time between retries will double ``max_doublings`` + times. + + A task's retry interval starts at + [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff], + then doubles ``max_doublings`` times, then increases + linearly, and finally retries at intervals of + [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] + up to + [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts] + times. + + For example, if + [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff] + is 10s, + [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] + is 300s, and ``max_doublings`` is 3, then the a task will + first be retried in 10s. The retry interval will double + three times, and then increase linearly by 2^3 \* 10s. + Finally, the task will retry at intervals of + [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] + until the task has been attempted + [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts] + times. Thus, the requests will retry at 10s, 20s, 40s, 80s, + 160s, 240s, 300s, 300s, .... + + If unspecified when the queue is created, Cloud Tasks will + pick the default. 
+ + This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget]. + + This field has the same meaning as `max_doublings in + queue.yaml/xml `__. + """ + + max_attempts = proto.Field( + proto.INT32, + number=1, + oneof='num_attempts', + ) + unlimited_attempts = proto.Field( + proto.BOOL, + number=2, + oneof='num_attempts', + ) + max_retry_duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + min_backoff = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + max_backoff = proto.Field( + proto.MESSAGE, + number=5, + message=duration_pb2.Duration, + ) + max_doublings = proto.Field( + proto.INT32, + number=6, + ) + + +class QueueStats(proto.Message): + r"""Statistics for a queue. + Attributes: + tasks_count (int): + Output only. An estimation of the number of + tasks in the queue, that is, the tasks in the + queue that haven't been executed, the tasks in + the queue which the queue has dispatched but has + not yet received a reply for, and the failed + tasks that the queue is retrying. + oldest_estimated_arrival_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. An estimation of the nearest + time in the future where a task in the queue is + scheduled to be executed. + executed_last_minute_count (int): + Output only. The number of tasks that the + queue has dispatched and received a reply for + during the last minute. This variable counts + both successful and non-successful executions. + concurrent_dispatches_count (int): + Output only. The number of requests that the + queue has dispatched but has not received a + reply for yet. + effective_execution_rate (float): + Output only. The current maximum number of + tasks per second executed by the queue. The + maximum value of this variable is controlled by + the RateLimits of the Queue. However, this value + could be less to avoid overloading the endpoints + tasks in the queue are targeting. 
+ """ + + tasks_count = proto.Field( + proto.INT64, + number=1, + ) + oldest_estimated_arrival_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + executed_last_minute_count = proto.Field( + proto.INT64, + number=3, + ) + concurrent_dispatches_count = proto.Field( + proto.INT64, + number=4, + ) + effective_execution_rate = proto.Field( + proto.DOUBLE, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/target.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/target.py new file mode 100644 index 00000000..fa3f9557 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/target.py @@ -0,0 +1,487 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.tasks.v2beta2', + manifest={ + 'HttpMethod', + 'PullTarget', + 'PullMessage', + 'AppEngineHttpTarget', + 'AppEngineHttpRequest', + 'AppEngineRouting', + }, +) + + +class HttpMethod(proto.Enum): + r"""The HTTP method used to execute the task.""" + HTTP_METHOD_UNSPECIFIED = 0 + POST = 1 + GET = 2 + HEAD = 3 + PUT = 4 + DELETE = 5 + + +class PullTarget(proto.Message): + r"""Pull target. 
""" + + +class PullMessage(proto.Message): + r"""The pull message contains data that can be used by the caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] to + process the task. + + This proto can only be used for tasks in a queue which has + [pull_target][google.cloud.tasks.v2beta2.Queue.pull_target] set. + + Attributes: + payload (bytes): + A data payload consumed by the worker to + execute the task. + tag (str): + The task's tag. + + Tags allow similar tasks to be processed in a batch. If you + label tasks with a tag, your worker can [lease + tasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + with the same tag using + [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter]. + For example, if you want to aggregate the events associated + with a specific user once a day, you could tag tasks with + the user ID. + + The task's tag can only be set when the [task is + created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + + The tag must be less than 500 characters. + + SDK compatibility: Although the SDK allows tags to be either + string or + `bytes `__, + only UTF-8 encoded tags can be used in Cloud Tasks. If a tag + isn't UTF-8 encoded, the tag will be empty when the task is + returned by Cloud Tasks. + """ + + payload = proto.Field( + proto.BYTES, + number=1, + ) + tag = proto.Field( + proto.STRING, + number=2, + ) + + +class AppEngineHttpTarget(proto.Message): + r"""App Engine HTTP target. + + The task will be delivered to the App Engine application hostname + specified by its + [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] + and + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest]. + The documentation for + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + explains how the task's host URL is constructed. 
+ + Using + [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + Attributes: + app_engine_routing_override (google.cloud.tasks_v2beta2.types.AppEngineRouting): + Overrides for the [task-level + app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + + If set, ``app_engine_routing_override`` is used for all + tasks in the queue, no matter what the setting is for the + [task-level + app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + """ + + app_engine_routing_override = proto.Field( + proto.MESSAGE, + number=1, + message='AppEngineRouting', + ) + + +class AppEngineHttpRequest(proto.Message): + r"""App Engine HTTP request. + + The message defines the HTTP request that is sent to an App Engine + app when the task is dispatched. + + This proto can only be used for tasks in a queue which has + [app_engine_http_target][google.cloud.tasks.v2beta2.Queue.app_engine_http_target] + set. + + Using + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + The task will be delivered to the App Engine app which belongs to + the same project as the queue. For more information, see `How + Requests are + Routed `__ + and how routing is affected by `dispatch + files `__. + Traffic is encrypted during transport and never leaves Google + datacenters. Because this traffic is carried over a communication + mechanism internal to Google, you cannot explicitly set the protocol + (for example, HTTP or HTTPS). The request to the handler, however, + will appear to have used the HTTP protocol. 
+ + The [AppEngineRouting][google.cloud.tasks.v2beta2.AppEngineRouting] + used to construct the URL that the task is delivered to can be set + at the queue-level or task-level: + + - If set, + [app_engine_routing_override][google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_override] + is used for all tasks in the queue, no matter what the setting is + for the [task-level + app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + + The ``url`` that the task will be sent to is: + + - ``url =`` + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] ``+`` + [relative_url][google.cloud.tasks.v2beta2.AppEngineHttpRequest.relative_url] + + Tasks can be dispatched to secure app handlers, unsecure app + handlers, and URIs restricted with + ```login: admin`` `__. + Because tasks are not run as any user, they cannot be dispatched to + URIs restricted with + ```login: required`` `__ + Task dispatches also do not follow redirects. + + The task attempt has succeeded if the app's request handler returns + an HTTP response code in the range [``200`` - ``299``]. The task + attempt has failed if the app's handler returns a non-2xx response + code or Cloud Tasks does not receive response before the + [deadline][Task.dispatch_deadline]. Failed tasks will be retried + according to the [retry + configuration][google.cloud.tasks.v2beta2.Queue.retry_config]. + ``503`` (Service Unavailable) is considered an App Engine system + error instead of an application error and will cause Cloud Tasks' + traffic congestion control to temporarily throttle the queue's + dispatches. Unlike other types of task targets, a ``429`` (Too Many + Requests) response from an app handler does not cause traffic + congestion control to throttle the queue. + + Attributes: + http_method (google.cloud.tasks_v2beta2.types.HttpMethod): + The HTTP method to use for the request. The default is POST. 
+ + The app's request handler for the task's target URL must be + able to handle HTTP requests with this http_method, + otherwise the task attempt fails with error code 405 (Method + Not Allowed). See `Writing a push task request + handler `__ + and the App Engine documentation for your runtime on `How + Requests are + Handled `__. + app_engine_routing (google.cloud.tasks_v2beta2.types.AppEngineRouting): + Task-level setting for App Engine routing. + + If set, + [app_engine_routing_override][google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_override] + is used for all tasks in the queue, no matter what the + setting is for the [task-level + app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + relative_url (str): + The relative URL. + The relative URL must begin with "/" and must be + a valid HTTP relative URL. It can contain a path + and query string arguments. If the relative URL + is empty, then the root path "/" will be used. + No spaces are allowed, and the maximum length + allowed is 2083 characters. + headers (Sequence[google.cloud.tasks_v2beta2.types.AppEngineHttpRequest.HeadersEntry]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + Repeated headers are not supported but a header value can + contain commas. + + Cloud Tasks sets some headers to default values: + + - ``User-Agent``: By default, this header is + ``"AppEngine-Google; (+http://code.google.com/appengine)"``. + This header can be modified, but Cloud Tasks will append + ``"AppEngine-Google; (+http://code.google.com/appengine)"`` + to the modified ``User-Agent``. + + If the task has a + [payload][google.cloud.tasks.v2beta2.AppEngineHttpRequest.payload], + Cloud Tasks sets the following headers: + + - ``Content-Type``: By default, the ``Content-Type`` header + is set to ``"application/octet-stream"``. 
The default can + be overridden by explicitly setting ``Content-Type`` to a + particular media type when the [task is + created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/json"``. + - ``Content-Length``: This is computed by Cloud Tasks. This + value is output only. It cannot be changed. + + The headers below cannot be set or overridden: + + - ``Host`` + - ``X-Google-*`` + - ``X-AppEngine-*`` + + In addition, Cloud Tasks sets some headers when the task is + dispatched, such as headers containing information about the + task; see `request + headers `__. + These headers are set only when the task is dispatched, so + they are not visible when the task is returned in a Cloud + Tasks response. + + Although there is no specific limit for the maximum number + of headers or the size, there is a limit on the maximum size + of the [Task][google.cloud.tasks.v2beta2.Task]. For more + information, see the + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + documentation. + payload (bytes): + Payload. + + The payload will be sent as the HTTP message body. A message + body, and thus a payload, is allowed only if the HTTP method + is POST or PUT. It is an error to set a data payload on a + task with an incompatible + [HttpMethod][google.cloud.tasks.v2beta2.HttpMethod]. + """ + + http_method = proto.Field( + proto.ENUM, + number=1, + enum='HttpMethod', + ) + app_engine_routing = proto.Field( + proto.MESSAGE, + number=2, + message='AppEngineRouting', + ) + relative_url = proto.Field( + proto.STRING, + number=3, + ) + headers = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + payload = proto.Field( + proto.BYTES, + number=5, + ) + + +class AppEngineRouting(proto.Message): + r"""App Engine Routing. + + Defines routing characteristics specific to App Engine - service, + version, and instance. 
+ + For more information about services, versions, and instances see `An + Overview of App + Engine `__, + `Microservices Architecture on Google App + Engine `__, + `App Engine Standard request + routing `__, + and `App Engine Flex request + routing `__. + + Attributes: + service (str): + App service. + + By default, the task is sent to the service which is the + default service when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable into + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable, then + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + are the empty string. + version (str): + App version. + + By default, the task is sent to the version which is the + default version when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable into + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. 
If + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable, then + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + are the empty string. + instance (str): + App instance. + + By default, the task is sent to an instance which is + available when the task is attempted. + + Requests can only be sent to a specific instance if `manual + scaling is used in App Engine + Standard `__. + App Engine Flex does not support instances. For more + information, see `App Engine Standard request + routing `__ + and `App Engine Flex request + routing `__. + host (str): + Output only. The host that the task is sent to. + + For more information, see `How Requests are + Routed `__. + + The host is constructed as: + + - ``host = [application_domain_name]``\ + ``| [service] + '.' + [application_domain_name]``\ + ``| [version] + '.' + [application_domain_name]``\ + ``| [version_dot_service]+ '.' + [application_domain_name]``\ + ``| [instance] + '.' + [application_domain_name]``\ + ``| [instance_dot_service] + '.' + [application_domain_name]``\ + ``| [instance_dot_version] + '.' + [application_domain_name]``\ + ``| [instance_dot_version_dot_service] + '.' + [application_domain_name]`` + + - ``application_domain_name`` = The domain name of the app, + for example .appspot.com, which is associated with the + queue's project ID. Some tasks which were created using + the App Engine SDK use a custom domain name. + + - ``service =`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + + - ``version =`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + + - ``version_dot_service =`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + ``+ '.' 
+`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + + - ``instance =`` + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + + - ``instance_dot_service =`` + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + ``+ '.' +`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + + - ``instance_dot_version =`` + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + ``+ '.' +`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + + - ``instance_dot_version_dot_service =`` + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + ``+ '.' +`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + ``+ '.' +`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + + If + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + is empty, then the task will be sent to the service which is + the default service when the task is attempted. + + If + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + is empty, then the task will be sent to the version which is + the default version when the task is attempted. + + If + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + is empty, then the task will be sent to an instance which is + available when the task is attempted. + + If + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + or + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + is invalid, then the task will be sent to the default + version of the default service when the task is attempted. 
+ """ + + service = proto.Field( + proto.STRING, + number=1, + ) + version = proto.Field( + proto.STRING, + number=2, + ) + instance = proto.Field( + proto.STRING, + number=3, + ) + host = proto.Field( + proto.STRING, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/task.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/task.py new file mode 100644 index 00000000..801ebc72 --- /dev/null +++ b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/task.py @@ -0,0 +1,254 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.tasks_v2beta2.types import target +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.tasks.v2beta2', + manifest={ + 'Task', + 'TaskStatus', + 'AttemptStatus', + }, +) + + +class Task(proto.Message): + r"""A unit of scheduled work. + Attributes: + name (str): + Optionally caller-specified in + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + + The task name. + + The task name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). 
For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the task's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + - ``TASK_ID`` can contain only letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), or underscores (_). The maximum + length is 500 characters. + app_engine_http_request (google.cloud.tasks_v2beta2.types.AppEngineHttpRequest): + App Engine HTTP request that is sent to the task's target. + Can be set only if + [app_engine_http_target][google.cloud.tasks.v2beta2.Queue.app_engine_http_target] + is set on the queue. + + An App Engine task is a task that has + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + set. + pull_message (google.cloud.tasks_v2beta2.types.PullMessage): + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + to process the task. Can be set only if + [pull_target][google.cloud.tasks.v2beta2.Queue.pull_target] + is set on the queue. + + A pull task is a task that has + [PullMessage][google.cloud.tasks.v2beta2.PullMessage] set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the task is scheduled to be attempted. + + For App Engine queues, this is when the task will be + attempted or retried. + + For pull queues, this is the time when the task is available + to be leased; if a task is currently leased, this is the + time when the current lease expires, that is, the time that + the task was leased plus the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + ``schedule_time`` will be truncated to the nearest + microsecond. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The time that the task was created. + + ``create_time`` will be truncated to the nearest second. + status (google.cloud.tasks_v2beta2.types.TaskStatus): + Output only. The task status. + view (google.cloud.tasks_v2beta2.types.Task.View): + Output only. The view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] has been returned. + """ + class View(proto.Enum): + r"""The view specifies a subset of + [Task][google.cloud.tasks.v2beta2.Task] data. + + When a task is returned in a response, not all information is + retrieved by default because some data, such as payloads, might be + desirable to return only when needed because of its large size or + because of the sensitivity of data that it contains. + """ + VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + name = proto.Field( + proto.STRING, + number=1, + ) + app_engine_http_request = proto.Field( + proto.MESSAGE, + number=3, + oneof='payload_type', + message=target.AppEngineHttpRequest, + ) + pull_message = proto.Field( + proto.MESSAGE, + number=4, + oneof='payload_type', + message=target.PullMessage, + ) + schedule_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + create_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + status = proto.Field( + proto.MESSAGE, + number=7, + message='TaskStatus', + ) + view = proto.Field( + proto.ENUM, + number=8, + enum=View, + ) + + +class TaskStatus(proto.Message): + r"""Status of the task. + Attributes: + attempt_dispatch_count (int): + Output only. The number of attempts + dispatched. + This count includes attempts which have been + dispatched but haven't received a response. + attempt_response_count (int): + Output only. The number of attempts which have received a + response. + + This field is not calculated for [pull + tasks][google.cloud.tasks.v2beta2.PullMessage]. + first_attempt_status (google.cloud.tasks_v2beta2.types.AttemptStatus): + Output only. 
The status of the task's first attempt. + + Only + [dispatch_time][google.cloud.tasks.v2beta2.AttemptStatus.dispatch_time] + will be set. The other + [AttemptStatus][google.cloud.tasks.v2beta2.AttemptStatus] + information is not retained by Cloud Tasks. + + This field is not calculated for [pull + tasks][google.cloud.tasks.v2beta2.PullMessage]. + last_attempt_status (google.cloud.tasks_v2beta2.types.AttemptStatus): + Output only. The status of the task's last attempt. + + This field is not calculated for [pull + tasks][google.cloud.tasks.v2beta2.PullMessage]. + """ + + attempt_dispatch_count = proto.Field( + proto.INT32, + number=1, + ) + attempt_response_count = proto.Field( + proto.INT32, + number=2, + ) + first_attempt_status = proto.Field( + proto.MESSAGE, + number=3, + message='AttemptStatus', + ) + last_attempt_status = proto.Field( + proto.MESSAGE, + number=4, + message='AttemptStatus', + ) + + +class AttemptStatus(proto.Message): + r"""The status of a task attempt. + Attributes: + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt was scheduled. + + ``schedule_time`` will be truncated to the nearest + microsecond. + dispatch_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt was dispatched. + + ``dispatch_time`` will be truncated to the nearest + microsecond. + response_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt response was + received. + + ``response_time`` will be truncated to the nearest + microsecond. + response_status (google.rpc.status_pb2.Status): + Output only. The response from the target for + this attempt. + If the task has not been attempted or the task + is currently running then the response status is + unset. 
+ """ + + schedule_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + dispatch_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + response_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + response_status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta2/mypy.ini b/owl-bot-staging/v2beta2/mypy.ini new file mode 100644 index 00000000..4505b485 --- /dev/null +++ b/owl-bot-staging/v2beta2/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/owl-bot-staging/v2beta2/noxfile.py b/owl-bot-staging/v2beta2/noxfile.py new file mode 100644 index 00000000..943cc28c --- /dev/null +++ b/owl-bot-staging/v2beta2/noxfile.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import os
import pathlib
import shutil
import subprocess
import sys


import nox  # type: ignore

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
# `setup.py --name` prints the distribution name followed by a newline;
# strip it so the value can be passed verbatim as a CLI argument below
# (the original left the trailing newline in, which leaks into
# `--package-name` for the lower-bound-checker sessions).
PACKAGE_NAME = subprocess.check_output(
    [sys.executable, "setup.py", "--name"], encoding="utf-8"
).strip()


# Sessions run by default when `nox` is invoked without `-s`.
# NOTE(review): nox itself consults `nox.options.sessions`; this mirrors the
# upstream template's `nox.sessions` attribute -- confirm which one the build
# actually relies on before changing it.
nox.sessions = [
    "unit",
    "cover",
    "mypy",
    # BUGFIX: the original omitted this comma, so implicit string-literal
    # concatenation silently produced one bogus entry "check_lower_boundsdocs".
    "check_lower_bounds",
    # exclude update_lower_bounds from default
    "docs",
]


@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
def unit(session):
    """Run the unit test suite under each supported interpreter."""
    session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
    session.install('-e', '.')

    session.run(
        'py.test',
        '--quiet',
        '--cov=google/cloud/tasks_v2beta2/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        # session.posargs lets callers narrow the run to a sub-path of tests/unit.
        os.path.join('tests', 'unit', ''.join(session.posargs)),
    )


@nox.session(python='3.7')
def cover(session):
    """Run the final coverage report.

    This outputs the coverage report aggregating coverage from the unit
    test runs (not system test runs), and then erases coverage data.
    """
    session.install("coverage", "pytest-cov")
    session.run("coverage", "report", "--show-missing", "--fail-under=100")
    session.run("coverage", "erase")


@nox.session(python=['3.6', '3.7'])
def mypy(session):
    """Run the mypy type checker over the installed package."""
    session.install('mypy', 'types-pkg_resources')
    session.install('.')
    session.run(
        'mypy',
        '--explicit-package-bases',
        'google',
    )


@nox.session
def update_lower_bounds(session):
    """Update lower bounds in constraints.txt to match setup.py."""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'update',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )


@nox.session
def check_lower_bounds(session):
    """Check that lower bounds in setup.py are reflected in the constraints file."""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'check',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )


@nox.session(python='3.6')
def docs(session):
    """Build the HTML docs for this library with Sphinx."""
    session.install("-e", ".")
    session.install("sphinx<3.0.0", "alabaster", "recommonmark")

    # Remove any stale build output before rebuilding.
    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
    session.run(
        "sphinx-build",
        "-W",  # warnings as errors
        "-T",  # show full traceback on exception
        "-N",  # no colors
        "-b",
        "html",
        "-d",
        os.path.join("docs", "_build", "doctrees", ""),
        os.path.join("docs", ""),
        os.path.join("docs", "_build", "html", ""),
    )
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Returns ``(trueList, falseList)``: the items for which *predicate* is
    truthy, then the items for which it is falsy, each in original order.
    """
    results = ([], [])

    for i in iterator:
        # bool -> int: falsy items land in results[0], truthy in results[1].
        results[int(predicate(i))].append(i)

    # Returns trueList, falseList
    return results[1], results[0]


class tasksCallTransformer(cst.CSTTransformer):
    """Rewrite flattened client method calls into request-dict form.

    For each known API method, positional/keyword arguments are folded into a
    single ``request={...}`` dict argument; the control parameters (retry,
    timeout, metadata) are preserved as keyword arguments.
    """
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    # Map of API method name -> ordered tuple of its request field names.
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        'acknowledge_task': ('name', 'schedule_time', ),
        'cancel_lease': ('name', 'schedule_time', 'response_view', ),
        'create_queue': ('parent', 'queue', ),
        'create_task': ('parent', 'task', 'response_view', ),
        'delete_queue': ('name', ),
        'delete_task': ('name', ),
        'get_iam_policy': ('resource', 'options', ),
        'get_queue': ('name', 'read_mask', ),
        'get_task': ('name', 'response_view', ),
        'lease_tasks': ('parent', 'lease_duration', 'max_tasks', 'response_view', 'filter', ),
        'list_queues': ('parent', 'filter', 'page_size', 'page_token', 'read_mask', ),
        'list_tasks': ('parent', 'response_view', 'page_size', 'page_token', ),
        'pause_queue': ('name', ),
        'purge_queue': ('name', ),
        'renew_lease': ('name', 'schedule_time', 'lease_duration', 'response_view', ),
        'resume_queue': ('name', ),
        'run_task': ('name', 'response_view', ),
        'set_iam_policy': ('resource', 'policy', ),
        'test_iam_permissions': ('resource', 'permissions', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Fold the call's flattened arguments into a ``request=`` dict."""
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        # Separate the control-plane kwargs (retry/timeout/metadata) from the
        # request fields.  (Idiom fix: `x not in y` over `not x in y`.)
        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the request fields can only be the control
        # parameters, passed positionally; re-attach them by keyword.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=tasksCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    # Lazily walk every .py file under in_dir.
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        with open(fpath, 'r') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w') as f:
            f.write(updated.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the tasks client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
+# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-tasks', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.15.0', + 'packaging >= 14.3', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/owl-bot-staging/v2beta2/tests/__init__.py b/owl-bot-staging/v2beta2/tests/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2beta2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2beta2/tests/unit/__init__.py b/owl-bot-staging/v2beta2/tests/unit/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2beta2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2beta2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2beta2/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2beta2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/__init__.py b/owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py b/owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py new file mode 100644 index 00000000..d2461be9 --- /dev/null +++ b/owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py @@ -0,0 +1,6121 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient +from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksClient +from google.cloud.tasks_v2beta2.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta2.services.cloud_tasks import transports +from google.cloud.tasks_v2beta2.services.cloud_tasks.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import target +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete 
these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudTasksClient._get_default_mtls_endpoint(None) is None + assert CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + CloudTasksClient, + CloudTasksAsyncClient, +]) +def test_cloud_tasks_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 
'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'cloudtasks.googleapis.com:443' + + +@pytest.mark.parametrize("client_class", [ + CloudTasksClient, + CloudTasksAsyncClient, +]) +def test_cloud_tasks_client_service_account_always_use_jwt(client_class): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + +@pytest.mark.parametrize("client_class", [ + CloudTasksClient, + CloudTasksAsyncClient, +]) +def test_cloud_tasks_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'cloudtasks.googleapis.com:443' + + +def test_cloud_tasks_client_get_transport_class(): + transport = CloudTasksClient.get_transport_class() + available_transports = [ + transports.CloudTasksGrpcTransport, + ] + assert transport in available_transports + + transport = CloudTasksClient.get_transport_class("grpc") + assert transport == transports.CloudTasksGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + (CloudTasksAsyncClient, 
transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)) +@mock.patch.object(CloudTasksAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksAsyncClient)) +def test_cloud_tasks_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudTasksClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudTasksClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "true"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "false"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)) +@mock.patch.object(CloudTasksAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_tasks_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_cloud_tasks_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_cloud_tasks_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_cloud_tasks_client_client_options_from_dict(): + with mock.patch('google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = CloudTasksClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_queues(transport: str = 'grpc', request_type=cloudtasks.ListQueuesRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_queues_from_dict(): + test_list_queues(request_type=dict) + + +def test_list_queues_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + client.list_queues() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + +@pytest.mark.asyncio +async def test_list_queues_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ListQueuesRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_queues_async_from_dict(): + await test_list_queues_async(request_type=dict) + + +def test_list_queues_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + call.return_value = cloudtasks.ListQueuesResponse() + client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_queues_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse()) + await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_queues_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_queues( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_queues_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_queues( + cloudtasks.ListQueuesRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_queues_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_queues( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_queues_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_queues( + cloudtasks.ListQueuesRequest(), + parent='parent_value', + ) + + +def test_list_queues_pager(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token='abc', + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token='def', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token='ghi', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_queues(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, queue.Queue) + for i in results) + +def test_list_queues_pages(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token='abc', + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token='def', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token='ghi', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + pages = list(client.list_queues(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_queues_async_pager(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_queues), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token='abc', + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token='def', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token='ghi', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_queues(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, queue.Queue) + for i in responses) + +@pytest.mark.asyncio +async def test_list_queues_async_pages(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token='abc', + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token='def', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token='ghi', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_queues(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_queue(transport: str = 'grpc', request_type=cloudtasks.GetQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + app_engine_http_target=target.AppEngineHttpTarget(app_engine_routing_override=target.AppEngineRouting(service='service_value')), + ) + response = client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, queue.Queue)
+    assert response.name == 'name_value'
+    assert response.state == queue.Queue.State.RUNNING
+
+
+def test_get_queue_from_dict():
+    test_get_queue(request_type=dict)
+
+
+def test_get_queue_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_queue),
+            '__call__') as call:
+        client.get_queue()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.GetQueueRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.GetQueueRequest):
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_queue),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue(
+            name='name_value',
+            state=queue.Queue.State.RUNNING,
+        ))
+        response = await client.get_queue(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.GetQueueRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_get_queue_async_from_dict(): + await test_get_queue_async(request_type=dict) + + +def test_get_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + call.return_value = queue.Queue() + client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_queue( + cloudtasks.GetQueueRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_queue( + cloudtasks.GetQueueRequest(), + name='name_value', + ) + + +def test_create_queue(transport: str = 'grpc', request_type=cloudtasks.CreateQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name='name_value', + state=gct_queue.Queue.State.RUNNING, + app_engine_http_target=target.AppEngineHttpTarget(app_engine_routing_override=target.AppEngineRouting(service='service_value')), + ) + response = client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == 'name_value' + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_create_queue_from_dict(): + test_create_queue(request_type=dict) + + +def test_create_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + client.create_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + +@pytest.mark.asyncio +async def test_create_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CreateQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue( + name='name_value', + state=gct_queue.Queue.State.RUNNING, + )) + response = await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_queue.Queue) + assert response.name == 'name_value' + assert response.state == gct_queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_create_queue_async_from_dict(): + await test_create_queue_async(request_type=dict) + + +def test_create_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + call.return_value = gct_queue.Queue() + client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_queue( + parent='parent_value', + queue=gct_queue.Queue(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].queue == gct_queue.Queue(name='name_value') + + +def test_create_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_queue( + cloudtasks.CreateQueueRequest(), + parent='parent_value', + queue=gct_queue.Queue(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_queue( + parent='parent_value', + queue=gct_queue.Queue(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].queue == gct_queue.Queue(name='name_value') + + +@pytest.mark.asyncio +async def test_create_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_queue( + cloudtasks.CreateQueueRequest(), + parent='parent_value', + queue=gct_queue.Queue(name='name_value'), + ) + + +def test_update_queue(transport: str = 'grpc', request_type=cloudtasks.UpdateQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name='name_value', + state=gct_queue.Queue.State.RUNNING, + app_engine_http_target=target.AppEngineHttpTarget(app_engine_routing_override=target.AppEngineRouting(service='service_value')), + ) + response = client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.UpdateQueueRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gct_queue.Queue)
+    assert response.name == 'name_value'
+    assert response.state == gct_queue.Queue.State.RUNNING
+
+
+def test_update_queue_from_dict():
+    test_update_queue(request_type=dict)
+
+
+def test_update_queue_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_queue),
+            '__call__') as call:
+        client.update_queue()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.UpdateQueueRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.UpdateQueueRequest):
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_queue),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue(
+            name='name_value',
+            state=gct_queue.Queue.State.RUNNING,
+        ))
+        response = await client.update_queue(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == 'name_value' + assert response.state == gct_queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_update_queue_async_from_dict(): + await test_update_queue_async(request_type=dict) + + +def test_update_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + + request.queue.name = 'queue.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + call.return_value = gct_queue.Queue() + client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'queue.name=queue.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + + request.queue.name = 'queue.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'queue.name=queue.name/value', + ) in kw['metadata'] + + +def test_update_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_queue( + queue=gct_queue.Queue(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].queue == gct_queue.Queue(name='name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +def test_update_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_queue( + queue=gct_queue.Queue(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].queue == gct_queue.Queue(name='name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_queue(transport: str = 'grpc', request_type=cloudtasks.DeleteQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_from_dict(): + test_delete_queue(request_type=dict) + + +def test_delete_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + client.delete_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + +@pytest.mark.asyncio +async def test_delete_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.DeleteQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_queue_async_from_dict(): + await test_delete_queue_async(request_type=dict) + + +def test_delete_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + call.return_value = None + client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name='name_value', + ) + + +def test_purge_queue(transport: str = 'grpc', request_type=cloudtasks.PurgeQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + app_engine_http_target=target.AppEngineHttpTarget(app_engine_routing_override=target.AppEngineRouting(service='service_value')), + ) + response = client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + + +def test_purge_queue_from_dict(): + test_purge_queue(request_type=dict) + + +def test_purge_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.purge_queue),
+            '__call__') as call:
+        client.purge_queue()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.PurgeQueueRequest()
+
+
+@pytest.mark.asyncio
+async def test_purge_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.PurgeQueueRequest):
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.purge_queue),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue(
+            name='name_value',
+            state=queue.Queue.State.RUNNING,
+        ))
+        response = await client.purge_queue(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.PurgeQueueRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, queue.Queue)
+    assert response.name == 'name_value'
+    assert response.state == queue.Queue.State.RUNNING
+
+
+@pytest.mark.asyncio
+async def test_purge_queue_async_from_dict():
+    await test_purge_queue_async(request_type=dict)
+
+
+def test_purge_queue_field_headers():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.PurgeQueueRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + call.return_value = queue.Queue() + client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_purge_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_purge_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.purge_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_purge_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.purge_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name='name_value', + ) + + +def test_pause_queue(transport: str = 'grpc', request_type=cloudtasks.PauseQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + app_engine_http_target=target.AppEngineHttpTarget(app_engine_routing_override=target.AppEngineRouting(service='service_value')), + ) + response = client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + + +def test_pause_queue_from_dict(): + test_pause_queue(request_type=dict) + + +def test_pause_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + client.pause_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + +@pytest.mark.asyncio +async def test_pause_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.PauseQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + )) + response = await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_pause_queue_async_from_dict(): + await test_pause_queue_async(request_type=dict) + + +def test_pause_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + call.return_value = queue.Queue() + client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_pause_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_pause_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.pause_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_pause_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_queue( + cloudtasks.PauseQueueRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.pause_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.pause_queue( + cloudtasks.PauseQueueRequest(), + name='name_value', + ) + + +def test_resume_queue(transport: str = 'grpc', request_type=cloudtasks.ResumeQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + app_engine_http_target=target.AppEngineHttpTarget(app_engine_routing_override=target.AppEngineRouting(service='service_value')), + ) + response = client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + + +def test_resume_queue_from_dict(): + test_resume_queue(request_type=dict) + + +def test_resume_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + client.resume_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + +@pytest.mark.asyncio +async def test_resume_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ResumeQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + )) + response = await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_resume_queue_async_from_dict(): + await test_resume_queue_async(request_type=dict) + + +def test_resume_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + call.return_value = queue.Queue() + client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_resume_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_resume_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.resume_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_resume_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_queue( + cloudtasks.ResumeQueueRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.resume_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.resume_queue( + cloudtasks.ResumeQueueRequest(), + name='name_value', + ) + + +def test_get_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_get_iam_policy_from_dict(): + test_get_iam_policy(request_type=dict) + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + +def test_get_iam_policy_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + response = client.get_iam_policy(request={ + 'resource': 'resource_value', + 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_get_iam_policy_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +def test_get_iam_policy_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_set_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_set_iam_policy_from_dict(): + test_set_iam_policy(request_type=dict) + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + +def test_set_iam_policy_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy(request={ + 'resource': 'resource_value', + 'policy': policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +def test_set_iam_policy_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_test_iam_permissions(transport: str = 'grpc', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_from_dict(): + test_test_iam_permissions(request_type=dict) + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + )) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + +def test_test_iam_permissions_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions(request={ + 'resource': 'resource_value', + 'permissions': ['permissions_value'], + } + ) + call.assert_called() + + +def test_test_iam_permissions_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.test_iam_permissions( + resource='resource_value', + permissions=['permissions_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + assert args[0].permissions == ['permissions_value'] + + +def test_test_iam_permissions_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource='resource_value', + permissions=['permissions_value'], + ) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.test_iam_permissions( + resource='resource_value', + permissions=['permissions_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + assert args[0].permissions == ['permissions_value'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource='resource_value', + permissions=['permissions_value'], + ) + + +def test_list_tasks(transport: str = 'grpc', request_type=cloudtasks.ListTasksRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse( + next_page_token='next_page_token_value', + ) + response = client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_tasks_from_dict(): + test_list_tasks(request_type=dict) + + +def test_list_tasks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + client.list_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + +@pytest.mark.asyncio +async def test_list_tasks_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ListTasksRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTasksAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_tasks_async_from_dict(): + await test_list_tasks_async(request_type=dict) + + +def test_list_tasks_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + call.return_value = cloudtasks.ListTasksResponse() + client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_tasks_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse()) + await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_tasks_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tasks( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_tasks_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tasks( + cloudtasks.ListTasksRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_tasks( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_tasks( + cloudtasks.ListTasksRequest(), + parent='parent_value', + ) + + +def test_list_tasks_pager(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token='abc', + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token='ghi', + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_tasks(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, task.Task) + for i in results) + +def test_list_tasks_pages(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token='abc', + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token='ghi', + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tasks(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_tasks_async_pager(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token='abc', + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token='ghi', + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tasks(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, task.Task) + for i in responses) + +@pytest.mark.asyncio +async def test_list_tasks_async_pages(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_tasks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token='abc', + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token='ghi', + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_tasks(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_task(transport: str = 'grpc', request_type=cloudtasks.GetTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name='name_value', + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), + ) + response = client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.view == task.Task.View.BASIC + + +def test_get_task_from_dict(): + test_get_task(request_type=dict) + + +def test_get_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + client.get_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + +@pytest.mark.asyncio +async def test_get_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.GetTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( + name='name_value', + view=task.Task.View.BASIC, + )) + response = await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_get_task_async_from_dict(): + await test_get_task_async(request_type=dict) + + +def test_get_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + call.return_value = task.Task() + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_task( + cloudtasks.GetTaskRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_task( + cloudtasks.GetTaskRequest(), + name='name_value', + ) + + +def test_create_task(transport: str = 'grpc', request_type=cloudtasks.CreateTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task( + name='name_value', + view=gct_task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), + ) + response = client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_task.Task) + assert response.name == 'name_value' + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_from_dict(): + test_create_task(request_type=dict) + + +def test_create_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + client.create_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + +@pytest.mark.asyncio +async def test_create_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CreateTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task( + name='name_value', + view=gct_task.Task.View.BASIC, + )) + response = await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_task.Task) + assert response.name == 'name_value' + assert response.view == gct_task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_create_task_async_from_dict(): + await test_create_task_async(request_type=dict) + + +def test_create_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloudtasks.CreateTaskRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value = gct_task.Task() + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gct_task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_task( + parent='parent_value', + task=gct_task.Task(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].task == gct_task.Task(name='name_value') + + +def test_create_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_task( + cloudtasks.CreateTaskRequest(), + parent='parent_value', + task=gct_task.Task(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_task( + parent='parent_value', + task=gct_task.Task(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].task == gct_task.Task(name='name_value') + + +@pytest.mark.asyncio +async def test_create_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_task( + cloudtasks.CreateTaskRequest(), + parent='parent_value', + task=gct_task.Task(name='name_value'), + ) + + +def test_delete_task(transport: str = 'grpc', request_type=cloudtasks.DeleteTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_task_from_dict(): + test_delete_task(request_type=dict) + + +def test_delete_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + client.delete_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + +@pytest.mark.asyncio +async def test_delete_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.DeleteTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_task_async_from_dict(): + await test_delete_task_async(request_type=dict) + + +def test_delete_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + call.return_value = None + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_task( + cloudtasks.DeleteTaskRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_task( + cloudtasks.DeleteTaskRequest(), + name='name_value', + ) + + +def test_lease_tasks(transport: str = 'grpc', request_type=cloudtasks.LeaseTasksRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lease_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.LeaseTasksResponse( + ) + response = client.lease_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.LeaseTasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudtasks.LeaseTasksResponse) + + +def test_lease_tasks_from_dict(): + test_lease_tasks(request_type=dict) + + +def test_lease_tasks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.lease_tasks),
+ '__call__') as call:
+ client.lease_tasks()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudtasks.LeaseTasksRequest()
+
+
+@pytest.mark.asyncio
+async def test_lease_tasks_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.LeaseTasksRequest):
+ client = CloudTasksAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.lease_tasks),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.LeaseTasksResponse(
+ ))
+ response = await client.lease_tasks(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudtasks.LeaseTasksRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, cloudtasks.LeaseTasksResponse)
+
+
+@pytest.mark.asyncio
+async def test_lease_tasks_async_from_dict():
+ await test_lease_tasks_async(request_type=dict)
+
+
+def test_lease_tasks_field_headers():
+ client = CloudTasksClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloudtasks.LeaseTasksRequest()
+
+ request.parent = 'parent/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.lease_tasks), + '__call__') as call: + call.return_value = cloudtasks.LeaseTasksResponse() + client.lease_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_lease_tasks_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.LeaseTasksRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lease_tasks), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.LeaseTasksResponse()) + await client.lease_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_lease_tasks_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lease_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.LeaseTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.lease_tasks( + parent='parent_value', + lease_duration=duration_pb2.Duration(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration(seconds=751) + + +def test_lease_tasks_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.lease_tasks( + cloudtasks.LeaseTasksRequest(), + parent='parent_value', + lease_duration=duration_pb2.Duration(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_lease_tasks_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lease_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.LeaseTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.LeaseTasksResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.lease_tasks( + parent='parent_value', + lease_duration=duration_pb2.Duration(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration(seconds=751) + + +@pytest.mark.asyncio +async def test_lease_tasks_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.lease_tasks( + cloudtasks.LeaseTasksRequest(), + parent='parent_value', + lease_duration=duration_pb2.Duration(seconds=751), + ) + + +def test_acknowledge_task(transport: str = 'grpc', request_type=cloudtasks.AcknowledgeTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.acknowledge_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.AcknowledgeTaskRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_acknowledge_task_from_dict(): + test_acknowledge_task(request_type=dict) + + +def test_acknowledge_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge_task), + '__call__') as call: + client.acknowledge_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.AcknowledgeTaskRequest() + + +@pytest.mark.asyncio +async def test_acknowledge_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.AcknowledgeTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.acknowledge_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.AcknowledgeTaskRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_acknowledge_task_async_from_dict(): + await test_acknowledge_task_async(request_type=dict) + + +def test_acknowledge_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.AcknowledgeTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.acknowledge_task), + '__call__') as call: + call.return_value = None + client.acknowledge_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_acknowledge_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.AcknowledgeTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.acknowledge_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_acknowledge_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.acknowledge_task( + name='name_value', + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) + + +def test_acknowledge_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.acknowledge_task( + cloudtasks.AcknowledgeTaskRequest(), + name='name_value', + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_acknowledge_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.acknowledge_task( + name='name_value', + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) + + +@pytest.mark.asyncio +async def test_acknowledge_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.acknowledge_task( + cloudtasks.AcknowledgeTaskRequest(), + name='name_value', + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_renew_lease(transport: str = 'grpc', request_type=cloudtasks.RenewLeaseRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.renew_lease), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name='name_value', + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), + ) + response = client.renew_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RenewLeaseRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task)
+ assert response.name == 'name_value'
+ assert response.view == task.Task.View.BASIC
+
+
+def test_renew_lease_from_dict():
+ test_renew_lease(request_type=dict)
+
+
+def test_renew_lease_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = CloudTasksClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.renew_lease),
+ '__call__') as call:
+ client.renew_lease()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudtasks.RenewLeaseRequest()
+
+
+@pytest.mark.asyncio
+async def test_renew_lease_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.RenewLeaseRequest):
+ client = CloudTasksAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.renew_lease),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task(
+ name='name_value',
+ view=task.Task.View.BASIC,
+ ))
+ response = await client.renew_lease(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudtasks.RenewLeaseRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_renew_lease_async_from_dict(): + await test_renew_lease_async(request_type=dict) + + +def test_renew_lease_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RenewLeaseRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.renew_lease), + '__call__') as call: + call.return_value = task.Task() + client.renew_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_renew_lease_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RenewLeaseRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.renew_lease), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.renew_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_renew_lease_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.renew_lease), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.renew_lease( + name='name_value', + schedule_time=timestamp_pb2.Timestamp(seconds=751), + lease_duration=duration_pb2.Duration(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) + assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration(seconds=751) + + +def test_renew_lease_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.renew_lease( + cloudtasks.RenewLeaseRequest(), + name='name_value', + schedule_time=timestamp_pb2.Timestamp(seconds=751), + lease_duration=duration_pb2.Duration(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_renew_lease_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.renew_lease), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.renew_lease( + name='name_value', + schedule_time=timestamp_pb2.Timestamp(seconds=751), + lease_duration=duration_pb2.Duration(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) + assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration(seconds=751) + + +@pytest.mark.asyncio +async def test_renew_lease_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.renew_lease( + cloudtasks.RenewLeaseRequest(), + name='name_value', + schedule_time=timestamp_pb2.Timestamp(seconds=751), + lease_duration=duration_pb2.Duration(seconds=751), + ) + + +def test_cancel_lease(transport: str = 'grpc', request_type=cloudtasks.CancelLeaseRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_lease), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = task.Task( + name='name_value', + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), + ) + response = client.cancel_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CancelLeaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.view == task.Task.View.BASIC + + +def test_cancel_lease_from_dict(): + test_cancel_lease(request_type=dict) + + +def test_cancel_lease_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_lease), + '__call__') as call: + client.cancel_lease() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CancelLeaseRequest() + + +@pytest.mark.asyncio +async def test_cancel_lease_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CancelLeaseRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_lease), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( + name='name_value', + view=task.Task.View.BASIC, + )) + response = await client.cancel_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CancelLeaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_cancel_lease_async_from_dict(): + await test_cancel_lease_async(request_type=dict) + + +def test_cancel_lease_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CancelLeaseRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_lease), + '__call__') as call: + call.return_value = task.Task() + client.cancel_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_cancel_lease_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CancelLeaseRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.cancel_lease), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.cancel_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_cancel_lease_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_lease), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_lease( + name='name_value', + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) + + +def test_cancel_lease_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.cancel_lease( + cloudtasks.CancelLeaseRequest(), + name='name_value', + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_cancel_lease_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_lease), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.cancel_lease( + name='name_value', + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) + + +@pytest.mark.asyncio +async def test_cancel_lease_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.cancel_lease( + cloudtasks.CancelLeaseRequest(), + name='name_value', + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_run_task(transport: str = 'grpc', request_type=cloudtasks.RunTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name='name_value', + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), + ) + response = client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.view == task.Task.View.BASIC + + +def test_run_task_from_dict(): + test_run_task(request_type=dict) + + +def test_run_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + client.run_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + +@pytest.mark.asyncio +async def test_run_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.RunTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( + name='name_value', + view=task.Task.View.BASIC, + )) + response = await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_run_task_async_from_dict(): + await test_run_task_async(request_type=dict) + + +def test_run_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + call.return_value = task.Task() + client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_run_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloudtasks.RunTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_run_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_run_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_task( + cloudtasks.RunTaskRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_run_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.run_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_run_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.run_task( + cloudtasks.RunTaskRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudTasksClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudTasksGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudTasksGrpcTransport, + ) + +def test_cloud_tasks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudTasksTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_cloud_tasks_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.CloudTasksTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_queues', + 'get_queue', + 'create_queue', + 'update_queue', + 'delete_queue', + 'purge_queue', + 'pause_queue', + 'resume_queue', + 'get_iam_policy', + 'set_iam_policy', + 'test_iam_permissions', + 'list_tasks', + 'get_task', + 'create_task', + 'delete_task', + 'lease_tasks', + 'acknowledge_task', + 'renew_lease', + 'cancel_lease', + 'run_task', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_cloud_tasks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport( + credentials_file="credentials.json", + 
quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_tasks_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + quota_project_id="octopus", + ) + + +def test_cloud_tasks_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_cloud_tasks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudTasksClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_tasks_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudTasksClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_cloud_tasks_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_cloud_tasks_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudTasksGrpcTransport, grpc_helpers), + (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "cloudtasks.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="cloudtasks.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) +def test_cloud_tasks_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_cloud_tasks_host_no_port(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudtasks.googleapis.com'), + ) + assert client.transport._host == 'cloudtasks.googleapis.com:443' + + +def test_cloud_tasks_host_with_port(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudtasks.googleapis.com:8000'), + ) + assert client.transport._host == 'cloudtasks.googleapis.com:8000' + +def test_cloud_tasks_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_tasks_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudTasksGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) +def test_cloud_tasks_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + 
credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) +def test_cloud_tasks_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_queue_path(): + project = "squid" + location = "clam" + queue = "whelk" + expected = "projects/{project}/locations/{location}/queues/{queue}".format(project=project, location=location, queue=queue, ) + actual = 
CloudTasksClient.queue_path(project, location, queue) + assert expected == actual + + +def test_parse_queue_path(): + expected = { + "project": "octopus", + "location": "oyster", + "queue": "nudibranch", + } + path = CloudTasksClient.queue_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_queue_path(path) + assert expected == actual + +def test_task_path(): + project = "cuttlefish" + location = "mussel" + queue = "winkle" + task = "nautilus" + expected = "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(project=project, location=location, queue=queue, task=task, ) + actual = CloudTasksClient.task_path(project, location, queue, task) + assert expected == actual + + +def test_parse_task_path(): + expected = { + "project": "scallop", + "location": "abalone", + "queue": "squid", + "task": "clam", + } + path = CloudTasksClient.task_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_task_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = CloudTasksClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = CloudTasksClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudTasksClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = CloudTasksClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = CloudTasksClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CloudTasksClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = CloudTasksClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = CloudTasksClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = CloudTasksClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudTasksClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CloudTasksClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = CloudTasksClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.CloudTasksTransport, '_prep_wrapped_messages') as prep: + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.CloudTasksTransport, '_prep_wrapped_messages') as prep: + transport_class = CloudTasksClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v2beta3/.coveragerc b/owl-bot-staging/v2beta3/.coveragerc new file mode 100644 index 00000000..1d5bc53f --- /dev/null +++ b/owl-bot-staging/v2beta3/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/tasks/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v2beta3/MANIFEST.in b/owl-bot-staging/v2beta3/MANIFEST.in new file mode 100644 index 00000000..249d527f --- /dev/null +++ b/owl-bot-staging/v2beta3/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/tasks *.py +recursive-include google/cloud/tasks_v2beta3 *.py diff --git a/owl-bot-staging/v2beta3/README.rst b/owl-bot-staging/v2beta3/README.rst new file mode 100644 index 00000000..6171a7e2 --- /dev/null +++ b/owl-bot-staging/v2beta3/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Tasks API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Tasks API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2beta3/docs/conf.py b/owl-bot-staging/v2beta3/docs/conf.py new file mode 100644 index 00000000..62c563cc --- /dev/null +++ b/owl-bot-staging/v2beta3/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-tasks documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-tasks" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. 
+# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-tasks-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-tasks.tex", + u"google-cloud-tasks Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-tasks", + u"Google Cloud Tasks Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-tasks", + u"google-cloud-tasks Documentation", + author, + "google-cloud-tasks", + "GAPIC library for Google Cloud Tasks API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v2beta3/docs/index.rst b/owl-bot-staging/v2beta3/docs/index.rst new file mode 100644 index 00000000..396b0b20 --- /dev/null +++ b/owl-bot-staging/v2beta3/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + tasks_v2beta3/services + tasks_v2beta3/types diff --git a/owl-bot-staging/v2beta3/docs/tasks_v2beta3/cloud_tasks.rst b/owl-bot-staging/v2beta3/docs/tasks_v2beta3/cloud_tasks.rst new file mode 100644 index 00000000..ef422e09 --- /dev/null +++ b/owl-bot-staging/v2beta3/docs/tasks_v2beta3/cloud_tasks.rst @@ -0,0 +1,10 @@ +CloudTasks +---------------------------- + +.. automodule:: google.cloud.tasks_v2beta3.services.cloud_tasks + :members: + :inherited-members: + +.. 
automodule:: google.cloud.tasks_v2beta3.services.cloud_tasks.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v2beta3/docs/tasks_v2beta3/services.rst b/owl-bot-staging/v2beta3/docs/tasks_v2beta3/services.rst new file mode 100644 index 00000000..bd97721b --- /dev/null +++ b/owl-bot-staging/v2beta3/docs/tasks_v2beta3/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Tasks v2beta3 API +=========================================== +.. toctree:: + :maxdepth: 2 + + cloud_tasks diff --git a/owl-bot-staging/v2beta3/docs/tasks_v2beta3/types.rst b/owl-bot-staging/v2beta3/docs/tasks_v2beta3/types.rst new file mode 100644 index 00000000..0f3455d1 --- /dev/null +++ b/owl-bot-staging/v2beta3/docs/tasks_v2beta3/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Tasks v2beta3 API +======================================== + +.. automodule:: google.cloud.tasks_v2beta3.types + :members: + :undoc-members: + :show-inheritance: diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks/__init__.py b/owl-bot-staging/v2beta3/google/cloud/tasks/__init__.py new file mode 100644 index 00000000..5af667c4 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks/__init__.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.cloud.tasks_v2beta3.services.cloud_tasks.client import CloudTasksClient +from google.cloud.tasks_v2beta3.services.cloud_tasks.async_client import CloudTasksAsyncClient + +from google.cloud.tasks_v2beta3.types.cloudtasks import CreateQueueRequest +from google.cloud.tasks_v2beta3.types.cloudtasks import CreateTaskRequest +from google.cloud.tasks_v2beta3.types.cloudtasks import DeleteQueueRequest +from google.cloud.tasks_v2beta3.types.cloudtasks import DeleteTaskRequest +from google.cloud.tasks_v2beta3.types.cloudtasks import GetQueueRequest +from google.cloud.tasks_v2beta3.types.cloudtasks import GetTaskRequest +from google.cloud.tasks_v2beta3.types.cloudtasks import ListQueuesRequest +from google.cloud.tasks_v2beta3.types.cloudtasks import ListQueuesResponse +from google.cloud.tasks_v2beta3.types.cloudtasks import ListTasksRequest +from google.cloud.tasks_v2beta3.types.cloudtasks import ListTasksResponse +from google.cloud.tasks_v2beta3.types.cloudtasks import PauseQueueRequest +from google.cloud.tasks_v2beta3.types.cloudtasks import PurgeQueueRequest +from google.cloud.tasks_v2beta3.types.cloudtasks import ResumeQueueRequest +from google.cloud.tasks_v2beta3.types.cloudtasks import RunTaskRequest +from google.cloud.tasks_v2beta3.types.cloudtasks import UpdateQueueRequest +from google.cloud.tasks_v2beta3.types.queue import Queue +from google.cloud.tasks_v2beta3.types.queue import QueueStats +from google.cloud.tasks_v2beta3.types.queue import RateLimits +from google.cloud.tasks_v2beta3.types.queue import RetryConfig +from google.cloud.tasks_v2beta3.types.queue import StackdriverLoggingConfig +from google.cloud.tasks_v2beta3.types.target import AppEngineHttpQueue +from google.cloud.tasks_v2beta3.types.target import AppEngineHttpRequest +from google.cloud.tasks_v2beta3.types.target import AppEngineRouting +from google.cloud.tasks_v2beta3.types.target import HttpRequest +from google.cloud.tasks_v2beta3.types.target import OAuthToken +from 
google.cloud.tasks_v2beta3.types.target import OidcToken +from google.cloud.tasks_v2beta3.types.target import PullMessage +from google.cloud.tasks_v2beta3.types.target import HttpMethod +from google.cloud.tasks_v2beta3.types.task import Attempt +from google.cloud.tasks_v2beta3.types.task import Task + +__all__ = ('CloudTasksClient', + 'CloudTasksAsyncClient', + 'CreateQueueRequest', + 'CreateTaskRequest', + 'DeleteQueueRequest', + 'DeleteTaskRequest', + 'GetQueueRequest', + 'GetTaskRequest', + 'ListQueuesRequest', + 'ListQueuesResponse', + 'ListTasksRequest', + 'ListTasksResponse', + 'PauseQueueRequest', + 'PurgeQueueRequest', + 'ResumeQueueRequest', + 'RunTaskRequest', + 'UpdateQueueRequest', + 'Queue', + 'QueueStats', + 'RateLimits', + 'RetryConfig', + 'StackdriverLoggingConfig', + 'AppEngineHttpQueue', + 'AppEngineHttpRequest', + 'AppEngineRouting', + 'HttpRequest', + 'OAuthToken', + 'OidcToken', + 'PullMessage', + 'HttpMethod', + 'Attempt', + 'Task', +) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks/py.typed b/owl-bot-staging/v2beta3/google/cloud/tasks/py.typed new file mode 100644 index 00000000..41f0b1b8 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/__init__.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/__init__.py new file mode 100644 index 00000000..52588da3 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/__init__.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.cloud_tasks import CloudTasksClient +from .services.cloud_tasks import CloudTasksAsyncClient + +from .types.cloudtasks import CreateQueueRequest +from .types.cloudtasks import CreateTaskRequest +from .types.cloudtasks import DeleteQueueRequest +from .types.cloudtasks import DeleteTaskRequest +from .types.cloudtasks import GetQueueRequest +from .types.cloudtasks import GetTaskRequest +from .types.cloudtasks import ListQueuesRequest +from .types.cloudtasks import ListQueuesResponse +from .types.cloudtasks import ListTasksRequest +from .types.cloudtasks import ListTasksResponse +from .types.cloudtasks import PauseQueueRequest +from .types.cloudtasks import PurgeQueueRequest +from .types.cloudtasks import ResumeQueueRequest +from .types.cloudtasks import RunTaskRequest +from .types.cloudtasks import UpdateQueueRequest +from .types.queue import Queue +from .types.queue import QueueStats +from .types.queue import RateLimits +from .types.queue import RetryConfig +from .types.queue import StackdriverLoggingConfig +from .types.target import AppEngineHttpQueue +from .types.target import AppEngineHttpRequest +from .types.target import AppEngineRouting +from .types.target import HttpRequest +from .types.target import OAuthToken +from .types.target import OidcToken +from .types.target import PullMessage +from .types.target import HttpMethod +from .types.task import Attempt +from .types.task import Task + +__all__ = ( + 'CloudTasksAsyncClient', +'AppEngineHttpQueue', +'AppEngineHttpRequest', +'AppEngineRouting', +'Attempt', +'CloudTasksClient', 
+'CreateQueueRequest', +'CreateTaskRequest', +'DeleteQueueRequest', +'DeleteTaskRequest', +'GetQueueRequest', +'GetTaskRequest', +'HttpMethod', +'HttpRequest', +'ListQueuesRequest', +'ListQueuesResponse', +'ListTasksRequest', +'ListTasksResponse', +'OAuthToken', +'OidcToken', +'PauseQueueRequest', +'PullMessage', +'PurgeQueueRequest', +'Queue', +'QueueStats', +'RateLimits', +'ResumeQueueRequest', +'RetryConfig', +'RunTaskRequest', +'StackdriverLoggingConfig', +'Task', +'UpdateQueueRequest', +) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/gapic_metadata.json b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/gapic_metadata.json new file mode 100644 index 00000000..1f8cc2e7 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/gapic_metadata.json @@ -0,0 +1,183 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.tasks_v2beta3", + "protoPackage": "google.cloud.tasks.v2beta3", + "schema": "1.0", + "services": { + "CloudTasks": { + "clients": { + "grpc": { + "libraryClient": "CloudTasksClient", + "rpcs": { + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { 
+ "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudTasksAsyncClient", + "rpcs": { + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/py.typed b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/py.typed new file mode 100644 index 00000000..41f0b1b8 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. 
diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/__init__.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py new file mode 100644 index 00000000..1478acb5 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import CloudTasksClient +from .async_client import CloudTasksAsyncClient + +__all__ = ( + 'CloudTasksClient', + 'CloudTasksAsyncClient', +) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py new file mode 100644 index 00000000..658f63c0 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py @@ -0,0 +1,1803 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import target +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport +from .client import CloudTasksClient + + +class CloudTasksAsyncClient: + """Cloud Tasks allows developers to manage the execution of + background work in their applications. 
+ """ + + _client: CloudTasksClient + + DEFAULT_ENDPOINT = CloudTasksClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudTasksClient.DEFAULT_MTLS_ENDPOINT + + queue_path = staticmethod(CloudTasksClient.queue_path) + parse_queue_path = staticmethod(CloudTasksClient.parse_queue_path) + task_path = staticmethod(CloudTasksClient.task_path) + parse_task_path = staticmethod(CloudTasksClient.parse_task_path) + common_billing_account_path = staticmethod(CloudTasksClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(CloudTasksClient.parse_common_billing_account_path) + common_folder_path = staticmethod(CloudTasksClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudTasksClient.parse_common_folder_path) + common_organization_path = staticmethod(CloudTasksClient.common_organization_path) + parse_common_organization_path = staticmethod(CloudTasksClient.parse_common_organization_path) + common_project_path = staticmethod(CloudTasksClient.common_project_path) + parse_common_project_path = staticmethod(CloudTasksClient.parse_common_project_path) + common_location_path = staticmethod(CloudTasksClient.common_location_path) + parse_common_location_path = staticmethod(CloudTasksClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksAsyncClient: The constructed client. + """ + return CloudTasksClient.from_service_account_info.__func__(CloudTasksAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksAsyncClient: The constructed client. + """ + return CloudTasksClient.from_service_account_file.__func__(CloudTasksAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudTasksTransport: + """Returns the transport used by the client instance. + + Returns: + CloudTasksTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(CloudTasksClient).get_transport_class, type(CloudTasksClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, CloudTasksTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud tasks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CloudTasksClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_queues(self, + request: cloudtasks.ListQueuesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesAsyncPager: + r"""Lists queues. + Queues are returned in lexicographical order. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.ListQueuesRequest`): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.services.cloud_tasks.pagers.ListQueuesAsyncPager: + Response message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.ListQueuesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_queues, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListQueuesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_queue(self, + request: cloudtasks.GetQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.GetQueueRequest`): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. + name (:class:`str`): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.GetQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_queue, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_queue(self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.CreateQueueRequest`): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. + parent (:class:`str`): + Required. The location name in which the queue will be + created. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (:class:`google.cloud.tasks_v2beta3.types.Queue`): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta3.Queue.name] + cannot be the same as an existing queue. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.CreateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_queue(self, + request: cloudtasks.UpdateQueueRequest = None, + *, + queue: gct_queue.Queue = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.UpdateQueueRequest`): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. + queue (:class:`google.cloud.tasks_v2beta3.types.Queue`): + Required. The queue to create or update. + + The queue's + [name][google.cloud.tasks.v2beta3.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2beta3.Queue.name] cannot be + changed. 
+ + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.UpdateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("queue.name", request.queue.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_queue(self, + request: cloudtasks.DeleteQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.DeleteQueueRequest`): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.DeleteQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_queue, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def purge_queue(self, + request: cloudtasks.PurgeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.PurgeQueueRequest`): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. + name (:class:`str`): + Required. The queue name. 
For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.PurgeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.purge_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def pause_queue(self, + request: cloudtasks.PauseQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.PauseQueueRequest`): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.PauseQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def resume_queue(self, + request: cloudtasks.ResumeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta3.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.ResumeQueueRequest`): + The request object. 
Request message for + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.ResumeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy(self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). 
+ A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy(self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. 
See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions(self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`Sequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. 
+ + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_tasks(self, + request: cloudtasks.ListTasksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksAsyncPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.ListTasksRequest`): + The request object. Request message for listing tasks + using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.services.cloud_tasks.pagers.ListTasksAsyncPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tasks, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTasksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_task(self, + request: cloudtasks.GetTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.GetTaskRequest`): + The request object. Request message for getting a task + using + [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. + name (:class:`str`): + Required. The task name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_task, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_task(self, + request: cloudtasks.CreateTaskRequest = None, + *, + parent: str = None, + task: gct_task.Task = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.CreateTaskRequest`): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`google.cloud.tasks_v2beta3.types.Task`): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta3.Task.name]. If a name + is not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2beta3.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or executed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. 
If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or executed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + executed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_task(self, + request: cloudtasks.DeleteTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.DeleteTaskRequest`): + The request object. Request message for deleting a task + using + [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_task, + default_retry=retries.Retry( +initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def run_task(self, + request: cloudtasks.RunTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. 
For + example, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Args: + request (:class:`google.cloud.tasks_v2beta3.types.RunTaskRequest`): + The request object. Request message for forcing a task + to run now using + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloudtasks.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-tasks", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "CloudTasksAsyncClient", +) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py new file mode 100644 index 00000000..7ec21704 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py @@ -0,0 +1,1942 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import target +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import 
CloudTasksGrpcTransport +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +class CloudTasksClientMeta(type): + """Metaclass for the CloudTasks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] + _transport_registry["grpc"] = CloudTasksGrpcTransport + _transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[CloudTasksTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudTasksClient(metaclass=CloudTasksClientMeta): + """Cloud Tasks allows developers to manage the execution of + background work in their applications. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudtasks.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudTasksTransport: + """Returns the transport used by the client instance. + + Returns: + CloudTasksTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def queue_path(project: str,location: str,queue: str,) -> str: + """Returns a fully-qualified queue string.""" + return "projects/{project}/locations/{location}/queues/{queue}".format(project=project, location=location, queue=queue, ) + + @staticmethod + def parse_queue_path(path: str) -> Dict[str,str]: + """Parses a queue path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def task_path(project: str,location: str,queue: str,task: str,) -> str: + """Returns a fully-qualified task string.""" + return "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(project=project, location=location, queue=queue, task=task, ) + + @staticmethod + def parse_task_path(path: str) -> Dict[str,str]: + """Parses a task path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)/tasks/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + 
"""Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, CloudTasksTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud tasks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. 
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudTasksTransport): + # transport is a CloudTasksTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_queues(self, + request: cloudtasks.ListQueuesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesPager: + r"""Lists queues. + Queues are returned in lexicographical order. + + Args: + request (google.cloud.tasks_v2beta3.types.ListQueuesRequest): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.services.cloud_tasks.pagers.ListQueuesPager: + Response message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListQueuesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListQueuesRequest): + request = cloudtasks.ListQueuesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_queues] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQueuesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_queue(self, + request: cloudtasks.GetQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + Args: + request (google.cloud.tasks_v2beta3.types.GetQueueRequest): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. + name (str): + Required. The resource name of the queue. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetQueueRequest): + request = cloudtasks.GetQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_queue(self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (google.cloud.tasks_v2beta3.types.CreateQueueRequest): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. + parent (str): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (google.cloud.tasks_v2beta3.types.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta3.Queue.name] + cannot be the same as an existing queue. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateQueueRequest): + request = cloudtasks.CreateQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_queue(self, + request: cloudtasks.UpdateQueueRequest = None, + *, + queue: gct_queue.Queue = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (google.cloud.tasks_v2beta3.types.UpdateQueueRequest): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. + queue (google.cloud.tasks_v2beta3.types.Queue): + Required. The queue to create or update. + + The queue's + [name][google.cloud.tasks.v2beta3.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2beta3.Queue.name] cannot be + changed. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.UpdateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.UpdateQueueRequest): + request = cloudtasks.UpdateQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("queue.name", request.queue.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_queue(self, + request: cloudtasks.DeleteQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (google.cloud.tasks_v2beta3.types.DeleteQueueRequest): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloudtasks.DeleteQueueRequest): + request = cloudtasks.DeleteQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def purge_queue(self, + request: cloudtasks.PurgeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Args: + request (google.cloud.tasks_v2beta3.types.PurgeQueueRequest): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PurgeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PurgeQueueRequest): + request = cloudtasks.PurgeQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def pause_queue(self, + request: cloudtasks.PauseQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. 
+ + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + Args: + request (google.cloud.tasks_v2beta3.types.PauseQueueRequest): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PauseQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloudtasks.PauseQueueRequest): + request = cloudtasks.PauseQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def resume_queue(self, + request: cloudtasks.ResumeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta3.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Args: + request (google.cloud.tasks_v2beta3.types.ResumeQueueRequest): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + name (str): + Required. The queue name. 
For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ResumeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ResumeQueueRequest): + request = cloudtasks.ResumeQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Args: + request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for `GetIamPolicy` + method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). 
+ A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. 
+ request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Args: + request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for `SetIamPolicy` + method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. 
+ + Args: + request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for + `TestIamPermissions` method. + resource (str): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (Sequence[str]): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + if resource is not None: + request.resource = resource + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_tasks(self, + request: cloudtasks.ListTasksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Args: + request (google.cloud.tasks_v2beta3.types.ListTasksRequest): + The request object. Request message for listing tasks + using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.services.cloud_tasks.pagers.ListTasksPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListTasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListTasksRequest): + request = cloudtasks.ListTasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTasksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_task(self, + request: cloudtasks.GetTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + Args: + request (google.cloud.tasks_v2beta3.types.GetTaskRequest): + The request object. Request message for getting a task + using + [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetTaskRequest): + request = cloudtasks.GetTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_task(self, + request: cloudtasks.CreateTaskRequest = None, + *, + parent: str = None, + task: gct_task.Task = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Args: + request (google.cloud.tasks_v2beta3.types.CreateTaskRequest): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (google.cloud.tasks_v2beta3.types.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta3.Task.name]. If a name + is not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2beta3.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. 
+ + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or executed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or executed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + executed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateTaskRequest): + request = cloudtasks.CreateTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_task(self, + request: cloudtasks.DeleteTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Args: + request (google.cloud.tasks_v2beta3.types.DeleteTaskRequest): + The request object. Request message for deleting a task + using + [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. + name (str): + Required. The task name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteTaskRequest): + request = cloudtasks.DeleteTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def run_task(self, + request: cloudtasks.RunTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Args: + request (google.cloud.tasks_v2beta3.types.RunTaskRequest): + The request object. Request message for forcing a task + to run now using + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. + name (str): + Required. The task name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.RunTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.RunTaskRequest): + request = cloudtasks.RunTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-tasks", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "CloudTasksClient", +) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py new file mode 100644 index 00000000..cbcd4de3 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py @@ -0,0 +1,264 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import task + + +class ListQueuesPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta3.types.ListQueuesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``queues`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta3.types.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., cloudtasks.ListQueuesResponse], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta3.types.ListQueuesRequest): + The initial request object. + response (google.cloud.tasks_v2beta3.types.ListQueuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[queue.Queue]: + for page in self.pages: + yield from page.queues + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListQueuesAsyncPager: + """A pager for iterating through ``list_queues`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta3.types.ListQueuesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta3.types.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cloudtasks.ListQueuesResponse]], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta3.types.ListQueuesRequest): + The initial request object. + response (google.cloud.tasks_v2beta3.types.ListQueuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[queue.Queue]: + async def async_generator(): + async for page in self.pages: + for response in page.queues: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTasksPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta3.types.ListTasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta3.types.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., cloudtasks.ListTasksResponse], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta3.types.ListTasksRequest): + The initial request object. 
+ response (google.cloud.tasks_v2beta3.types.ListTasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[task.Task]: + for page in self.pages: + yield from page.tasks + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTasksAsyncPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta3.types.ListTasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta3.types.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cloudtasks.ListTasksResponse]], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.tasks_v2beta3.types.ListTasksRequest): + The initial request object. + response (google.cloud.tasks_v2beta3.types.ListTasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[task.Task]: + async def async_generator(): + async for page in self.pages: + for response in page.tasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py new file mode 100644 index 00000000..3db96829 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
from collections import OrderedDict
from typing import Dict, Type

from .base import CloudTasksTransport
from .grpc import CloudTasksGrpcTransport
from .grpc_asyncio import CloudTasksGrpcAsyncIOTransport


# Registry of concrete transport implementations, keyed by the string a
# caller may pass as the client's ``transport=...`` argument. Insertion
# order is preserved so 'grpc' remains the default/first entry.
_transport_registry = OrderedDict((
    ('grpc', CloudTasksGrpcTransport),
    ('grpc_asyncio', CloudTasksGrpcAsyncIOTransport),
))  # type: Dict[str, Type[CloudTasksTransport]]

__all__ = (
    'CloudTasksTransport',
    'CloudTasksGrpcTransport',
    'CloudTasksGrpcAsyncIOTransport',
)
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import packaging.version
import pkg_resources

import google.auth                         # type: ignore
import google.api_core                     # type: ignore
from google.api_core import exceptions as core_exceptions  # type: ignore
from google.api_core import gapic_v1       # type: ignore
from google.api_core import retry as retries  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.tasks_v2beta3.types import cloudtasks
from google.cloud.tasks_v2beta3.types import queue
from google.cloud.tasks_v2beta3.types import queue as gct_queue
from google.cloud.tasks_v2beta3.types import task
from google.cloud.tasks_v2beta3.types import task as gct_task
from google.iam.v1 import iam_policy_pb2   # type: ignore
from google.iam.v1 import policy_pb2       # type: ignore
from google.protobuf import empty_pb2      # type: ignore

# Build the default client-info (user-agent) from the installed package
# version; fall back to an anonymous ClientInfo when the distribution is
# not installed (e.g. running from a source checkout).
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            'google-cloud-tasks',
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

# Detect the installed google-auth version; used below to decide whether
# the newer ``default_scopes`` keyword may be passed to google-auth.
try:
    # google.auth.__version__ was added in 1.26.0
    _GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
    try:  # try pkg_resources if it is available
        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
        _GOOGLE_AUTH_VERSION = None


class CloudTasksTransport(abc.ABC):
    """Abstract transport class for CloudTasks.

    Resolves credentials and scopes at construction time; concrete
    subclasses (gRPC, gRPC-asyncio) implement the per-RPC callables
    declared as abstract properties below.
    """

    # OAuth scopes requested when none are supplied by the caller.
    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'cloudtasks.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        # NOTE(review): this check assumes a DNS name or IPv4 address; a bare
        # IPv6 literal already contains ':' and would not get a port appended
        # -- confirm callers never pass one.
        if ':' not in host:
            host += ':443'
        self._host = host

        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)

        # Save the scopes.
        self._scopes = scopes or self.AUTH_SCOPES

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                                credentials_file,
                                **scopes_kwargs,
                                quota_project_id=quota_project_id
                            )

        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)

        # If the credentials is service account credentials, then always try to use self signed JWT.
        # (The hasattr guard keeps this working against older google-auth releases.)
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

    # TODO(busunkim): This method is in the base transport
    # to avoid duplicating code across the transport classes. These functions
    # should be deleted once the minimum required versions of google-auth is increased.

    # TODO: Remove this function once google-auth >= 1.25.0 is required
    @classmethod
    def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]:
        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""

        scopes_kwargs = {}

        # google-auth >= 1.25.0 accepts ``default_scopes`` alongside
        # user-provided ``scopes``; older versions only take ``scopes``.
        if _GOOGLE_AUTH_VERSION and (
            packaging.version.parse(_GOOGLE_AUTH_VERSION)
            >= packaging.version.parse("1.25.0")
        ):
            scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
        else:
            scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}

        return scopes_kwargs

    def _prep_wrapped_messages(self, client_info: gapic_v1.client_info.ClientInfo) -> None:
        """Wrap each RPC callable with its default retry/timeout policy.

        Retries (on DeadlineExceeded / ServiceUnavailable) are configured
        only for the read-only or idempotent RPCs; mutating RPCs such as
        create/update/purge get a plain 20s timeout.
        """
        # Precompute the wrapped methods.
        self._wrapped_methods = {
            self.list_queues: gapic_v1.method.wrap_method(
                self.list_queues,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=20.0,
                ),
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.get_queue: gapic_v1.method.wrap_method(
                self.get_queue,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=20.0,
                ),
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.create_queue: gapic_v1.method.wrap_method(
                self.create_queue,
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.update_queue: gapic_v1.method.wrap_method(
                self.update_queue,
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.delete_queue: gapic_v1.method.wrap_method(
                self.delete_queue,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=20.0,
                ),
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.purge_queue: gapic_v1.method.wrap_method(
                self.purge_queue,
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.pause_queue: gapic_v1.method.wrap_method(
                self.pause_queue,
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.resume_queue: gapic_v1.method.wrap_method(
                self.resume_queue,
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.get_iam_policy: gapic_v1.method.wrap_method(
                self.get_iam_policy,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=20.0,
                ),
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.set_iam_policy: gapic_v1.method.wrap_method(
                self.set_iam_policy,
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.test_iam_permissions: gapic_v1.method.wrap_method(
                self.test_iam_permissions,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=20.0,
                ),
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.list_tasks: gapic_v1.method.wrap_method(
                self.list_tasks,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=20.0,
                ),
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.get_task: gapic_v1.method.wrap_method(
                self.get_task,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=20.0,
                ),
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.create_task: gapic_v1.method.wrap_method(
                self.create_task,
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.delete_task: gapic_v1.method.wrap_method(
                self.delete_task,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=20.0,
                ),
                default_timeout=20.0,
                client_info=client_info,
            ),
            self.run_task: gapic_v1.method.wrap_method(
                self.run_task,
                default_timeout=20.0,
                client_info=client_info,
            ),
        }

    # Each RPC is exposed as an abstract property returning the transport's
    # callable. Concrete sync transports return the response type directly;
    # async transports return an awaitable of it (hence the Union).

    @property
    def list_queues(self) -> Callable[
            [cloudtasks.ListQueuesRequest],
            Union[
                cloudtasks.ListQueuesResponse,
                Awaitable[cloudtasks.ListQueuesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_queue(self) -> Callable[
            [cloudtasks.GetQueueRequest],
            Union[
                queue.Queue,
                Awaitable[queue.Queue]
            ]]:
        raise NotImplementedError()

    @property
    def create_queue(self) -> Callable[
            [cloudtasks.CreateQueueRequest],
            Union[
                gct_queue.Queue,
                Awaitable[gct_queue.Queue]
            ]]:
        raise NotImplementedError()

    @property
    def update_queue(self) -> Callable[
            [cloudtasks.UpdateQueueRequest],
            Union[
                gct_queue.Queue,
                Awaitable[gct_queue.Queue]
            ]]:
        raise NotImplementedError()

    @property
    def delete_queue(self) -> Callable[
            [cloudtasks.DeleteQueueRequest],
            Union[
                empty_pb2.Empty,
                Awaitable[empty_pb2.Empty]
            ]]:
        raise NotImplementedError()

    @property
    def purge_queue(self) -> Callable[
            [cloudtasks.PurgeQueueRequest],
            Union[
                queue.Queue,
                Awaitable[queue.Queue]
            ]]:
        raise NotImplementedError()

    @property
    def pause_queue(self) -> Callable[
            [cloudtasks.PauseQueueRequest],
            Union[
                queue.Queue,
                Awaitable[queue.Queue]
            ]]:
        raise NotImplementedError()

    @property
    def resume_queue(self) -> Callable[
            [cloudtasks.ResumeQueueRequest],
            Union[
                queue.Queue,
                Awaitable[queue.Queue]
            ]]:
        raise NotImplementedError()

    @property
    def get_iam_policy(self) -> Callable[
            [iam_policy_pb2.GetIamPolicyRequest],
            Union[
                policy_pb2.Policy,
                Awaitable[policy_pb2.Policy]
            ]]:
        raise NotImplementedError()

    @property
    def set_iam_policy(self) -> Callable[
            [iam_policy_pb2.SetIamPolicyRequest],
            Union[
                policy_pb2.Policy,
                Awaitable[policy_pb2.Policy]
            ]]:
        raise NotImplementedError()

    @property
    def test_iam_permissions(self) -> Callable[
            [iam_policy_pb2.TestIamPermissionsRequest],
            Union[
                iam_policy_pb2.TestIamPermissionsResponse,
                Awaitable[iam_policy_pb2.TestIamPermissionsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def list_tasks(self) -> Callable[
            [cloudtasks.ListTasksRequest],
            Union[
                cloudtasks.ListTasksResponse,
                Awaitable[cloudtasks.ListTasksResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_task(self) -> Callable[
            [cloudtasks.GetTaskRequest],
            Union[
                task.Task,
                Awaitable[task.Task]
            ]]:
        raise NotImplementedError()

    @property
    def create_task(self) -> Callable[
            [cloudtasks.CreateTaskRequest],
            Union[
                gct_task.Task,
                Awaitable[gct_task.Task]
            ]]:
        raise NotImplementedError()

    @property
    def delete_task(self) -> Callable[
            [cloudtasks.DeleteTaskRequest],
            Union[
                empty_pb2.Empty,
                Awaitable[empty_pb2.Empty]
            ]]:
        raise NotImplementedError()

    @property
    def run_task(self) -> Callable[
            [cloudtasks.RunTaskRequest],
            Union[
                task.Task,
                Awaitable[task.Task]
            ]]:
        raise NotImplementedError()


__all__ = (
    'CloudTasksTransport',
)
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO + + +class CloudTasksGrpcTransport(CloudTasksTransport): + """gRPC backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'cloudtasks.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'cloudtasks.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def list_queues(self) -> Callable[ + [cloudtasks.ListQueuesRequest], + cloudtasks.ListQueuesResponse]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + ~.ListQueuesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_queues' not in self._stubs: + self._stubs['list_queues'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/ListQueues', + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs['list_queues'] + + @property + def get_queue(self) -> Callable[ + [cloudtasks.GetQueueRequest], + queue.Queue]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_queue' not in self._stubs: + self._stubs['get_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/GetQueue', + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['get_queue'] + + @property + def create_queue(self) -> Callable[ + [cloudtasks.CreateQueueRequest], + gct_queue.Queue]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_queue' not in self._stubs: + self._stubs['create_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/CreateQueue', + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs['create_queue'] + + @property + def update_queue(self) -> Callable[ + [cloudtasks.UpdateQueueRequest], + gct_queue.Queue]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_queue' not in self._stubs: + self._stubs['update_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/UpdateQueue', + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs['update_queue'] + + @property + def delete_queue(self) -> Callable[ + [cloudtasks.DeleteQueueRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. 
+ + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_queue' not in self._stubs: + self._stubs['delete_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/DeleteQueue', + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_queue'] + + @property + def purge_queue(self) -> Callable[ + [cloudtasks.PurgeQueueRequest], + queue.Queue]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'purge_queue' not in self._stubs: + self._stubs['purge_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/PurgeQueue', + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['purge_queue'] + + @property + def pause_queue(self) -> Callable[ + [cloudtasks.PauseQueueRequest], + queue.Queue]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'pause_queue' not in self._stubs: + self._stubs['pause_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/PauseQueue', + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['pause_queue'] + + @property + def resume_queue(self) -> Callable[ + [cloudtasks.ResumeQueueRequest], + queue.Queue]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. 
The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta3.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'resume_queue' not in self._stubs: + self._stubs['resume_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/ResumeQueue', + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['resume_queue'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta3.Queue]. 
If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def list_tasks(self) -> Callable[ + [cloudtasks.ListTasksRequest], + cloudtasks.ListTasksResponse]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + ~.ListTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_tasks' not in self._stubs: + self._stubs['list_tasks'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/ListTasks', + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs['list_tasks'] + + @property + def get_task(self) -> Callable[ + [cloudtasks.GetTaskRequest], + task.Task]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_task' not in self._stubs: + self._stubs['get_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/GetTask', + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['get_task'] + + @property + def create_task(self) -> Callable[ + [cloudtasks.CreateTaskRequest], + gct_task.Task]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Returns: + Callable[[~.CreateTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_task' not in self._stubs: + self._stubs['create_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/CreateTask', + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs['create_task'] + + @property + def delete_task(self) -> Callable[ + [cloudtasks.DeleteTaskRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_task' not in self._stubs: + self._stubs['delete_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/DeleteTask', + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_task'] + + @property + def run_task(self) -> Callable[ + [cloudtasks.RunTaskRequest], + task.Task]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. 
That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Returns: + Callable[[~.RunTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_task' not in self._stubs: + self._stubs['run_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/RunTask', + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['run_task'] + + +__all__ = ( + 'CloudTasksGrpcTransport', +) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py new file mode 100644 index 00000000..1574d1e1 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py @@ -0,0 +1,784 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .grpc import CloudTasksGrpcTransport + + +class CloudTasksGrpcAsyncIOTransport(CloudTasksTransport): + """gRPC AsyncIO backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'cloudtasks.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'cloudtasks.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_queues(self) -> Callable[ + [cloudtasks.ListQueuesRequest], + Awaitable[cloudtasks.ListQueuesResponse]]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + Awaitable[~.ListQueuesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_queues' not in self._stubs: + self._stubs['list_queues'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/ListQueues', + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs['list_queues'] + + @property + def get_queue(self) -> Callable[ + [cloudtasks.GetQueueRequest], + Awaitable[queue.Queue]]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_queue' not in self._stubs: + self._stubs['get_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/GetQueue', + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['get_queue'] + + @property + def create_queue(self) -> Callable[ + [cloudtasks.CreateQueueRequest], + Awaitable[gct_queue.Queue]]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. 
+ + Returns: + Callable[[~.CreateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_queue' not in self._stubs: + self._stubs['create_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/CreateQueue', + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs['create_queue'] + + @property + def update_queue(self) -> Callable[ + [cloudtasks.UpdateQueueRequest], + Awaitable[gct_queue.Queue]]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_queue' not in self._stubs: + self._stubs['update_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/UpdateQueue', + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs['update_queue'] + + @property + def delete_queue(self) -> Callable[ + [cloudtasks.DeleteQueueRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_queue' not in self._stubs: + self._stubs['delete_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/DeleteQueue', + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_queue'] + + @property + def purge_queue(self) -> Callable[ + [cloudtasks.PurgeQueueRequest], + Awaitable[queue.Queue]]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. 
A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'purge_queue' not in self._stubs: + self._stubs['purge_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/PurgeQueue', + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['purge_queue'] + + @property + def pause_queue(self) -> Callable[ + [cloudtasks.PauseQueueRequest], + Awaitable[queue.Queue]]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'pause_queue' not in self._stubs: + self._stubs['pause_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/PauseQueue', + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['pause_queue'] + + @property + def resume_queue(self) -> Callable[ + [cloudtasks.ResumeQueueRequest], + Awaitable[queue.Queue]]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta3.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'resume_queue' not in self._stubs: + self._stubs['resume_queue'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/ResumeQueue', + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs['resume_queue'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. 
+ + Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def list_tasks(self) -> Callable[ + [cloudtasks.ListTasksRequest], + Awaitable[cloudtasks.ListTasksResponse]]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. 
+ + By default, only the + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + Awaitable[~.ListTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_tasks' not in self._stubs: + self._stubs['list_tasks'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/ListTasks', + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs['list_tasks'] + + @property + def get_task(self) -> Callable[ + [cloudtasks.GetTaskRequest], + Awaitable[task.Task]]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_task' not in self._stubs: + self._stubs['get_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/GetTask', + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['get_task'] + + @property + def create_task(self) -> Callable[ + [cloudtasks.CreateTaskRequest], + Awaitable[gct_task.Task]]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Returns: + Callable[[~.CreateTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_task' not in self._stubs: + self._stubs['create_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/CreateTask', + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs['create_task'] + + @property + def delete_task(self) -> Callable[ + [cloudtasks.DeleteTaskRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_task' not in self._stubs: + self._stubs['delete_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/DeleteTask', + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_task'] + + @property + def run_task(self) -> Callable[ + [cloudtasks.RunTaskRequest], + Awaitable[task.Task]]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Returns: + Callable[[~.RunTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'run_task' not in self._stubs: + self._stubs['run_task'] = self.grpc_channel.unary_unary( + '/google.cloud.tasks.v2beta3.CloudTasks/RunTask', + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs['run_task'] + + +__all__ = ( + 'CloudTasksGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/__init__.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/__init__.py new file mode 100644 index 00000000..3b97518d --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/__init__.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .cloudtasks import ( + CreateQueueRequest, + CreateTaskRequest, + DeleteQueueRequest, + DeleteTaskRequest, + GetQueueRequest, + GetTaskRequest, + ListQueuesRequest, + ListQueuesResponse, + ListTasksRequest, + ListTasksResponse, + PauseQueueRequest, + PurgeQueueRequest, + ResumeQueueRequest, + RunTaskRequest, + UpdateQueueRequest, +) +from .queue import ( + Queue, + QueueStats, + RateLimits, + RetryConfig, + StackdriverLoggingConfig, +) +from .target import ( + AppEngineHttpQueue, + AppEngineHttpRequest, + AppEngineRouting, + HttpRequest, + OAuthToken, + OidcToken, + PullMessage, + HttpMethod, +) +from .task import ( + Attempt, + Task, +) + +__all__ = ( + 'CreateQueueRequest', + 'CreateTaskRequest', + 'DeleteQueueRequest', + 'DeleteTaskRequest', + 'GetQueueRequest', + 'GetTaskRequest', + 'ListQueuesRequest', + 'ListQueuesResponse', + 'ListTasksRequest', + 'ListTasksResponse', + 'PauseQueueRequest', + 'PurgeQueueRequest', + 'ResumeQueueRequest', + 'RunTaskRequest', + 'UpdateQueueRequest', + 'Queue', + 'QueueStats', + 'RateLimits', + 'RetryConfig', + 'StackdriverLoggingConfig', + 'AppEngineHttpQueue', + 'AppEngineHttpRequest', + 'AppEngineRouting', + 'HttpRequest', + 'OAuthToken', + 'OidcToken', + 'PullMessage', + 'HttpMethod', + 'Attempt', + 'Task', +) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/cloudtasks.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/cloudtasks.py new file mode 100644 index 00000000..5d3baf41 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/cloudtasks.py @@ -0,0 +1,579 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.tasks.v2beta3', + manifest={ + 'ListQueuesRequest', + 'ListQueuesResponse', + 'GetQueueRequest', + 'CreateQueueRequest', + 'UpdateQueueRequest', + 'DeleteQueueRequest', + 'PurgeQueueRequest', + 'PauseQueueRequest', + 'ResumeQueueRequest', + 'ListTasksRequest', + 'ListTasksResponse', + 'GetTaskRequest', + 'CreateTaskRequest', + 'DeleteTaskRequest', + 'RunTaskRequest', + }, +) + + +class ListQueuesRequest(proto.Message): + r"""Request message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + + Attributes: + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + filter (str): + ``filter`` can be used to specify a subset of queues. Any + [Queue][google.cloud.tasks.v2beta3.Queue] field can be used + as a filter and several operators as supported. For example: + ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as + described in `Stackdriver's Advanced Logs + Filters `__. + + Sample filter "state: PAUSED". + + Note that using filters might cause fewer queues than the + requested page_size to be returned. + page_size (int): + Requested page size. + + The maximum page size is 9800. If unspecified, the page size + will be the maximum. 
Fewer queues than requested might be + returned, even if more queues exist; use the + [next_page_token][google.cloud.tasks.v2beta3.ListQueuesResponse.next_page_token] + in the response to determine if more queues exist. + page_token (str): + A token identifying the page of results to return. + + To request the first page results, page_token must be empty. + To request the next page of results, page_token must be the + value of + [next_page_token][google.cloud.tasks.v2beta3.ListQueuesResponse.next_page_token] + returned from the previous call to + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues] + method. It is an error to switch the value of the + [filter][google.cloud.tasks.v2beta3.ListQueuesRequest.filter] + while iterating through pages. + read_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Read mask is used for a more granular control over + what the API returns. If the mask is not present all fields + will be returned except [Queue.stats]. [Queue.stats] will be + returned only if it was explicitly specified in the mask. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + read_mask = proto.Field( + proto.MESSAGE, + number=5, + message=field_mask_pb2.FieldMask, + ) + + +class ListQueuesResponse(proto.Message): + r"""Response message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + + Attributes: + queues (Sequence[google.cloud.tasks_v2beta3.types.Queue]): + The list of queues. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues] + with this value as the + [page_token][google.cloud.tasks.v2beta3.ListQueuesRequest.page_token]. + + If the next_page_token is empty, there are no more results. 
+ + The page token is valid for only 2 hours. + """ + + @property + def raw_page(self): + return self + + queues = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gct_queue.Queue, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetQueueRequest(proto.Message): + r"""Request message for + [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. + + Attributes: + name (str): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + read_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Read mask is used for a more granular control over + what the API returns. If the mask is not present all fields + will be returned except [Queue.stats]. [Queue.stats] will be + returned only if it was explicitly specified in the mask. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + read_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class CreateQueueRequest(proto.Message): + r"""Request message for + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. + + Attributes: + parent (str): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + queue (google.cloud.tasks_v2beta3.types.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta3.Queue.name] cannot + be the same as an existing queue. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + queue = proto.Field( + proto.MESSAGE, + number=2, + message=gct_queue.Queue, + ) + + +class UpdateQueueRequest(proto.Message): + r"""Request message for + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. 
+ + Attributes: + queue (google.cloud.tasks_v2beta3.types.Queue): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2beta3.Queue.name] + must be specified. + + Output only fields cannot be modified using UpdateQueue. Any + value specified for an output only field will be ignored. + The queue's [name][google.cloud.tasks.v2beta3.Queue.name] + cannot be changed. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields of the + queue are being updated. + If empty, then all fields will be updated. + """ + + queue = proto.Field( + proto.MESSAGE, + number=1, + message=gct_queue.Queue, + ) + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteQueueRequest(proto.Message): + r"""Request message for + [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class PurgeQueueRequest(proto.Message): + r"""Request message for + [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class PauseQueueRequest(proto.Message): + r"""Request message for + [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ResumeQueueRequest(proto.Message): + r"""Request message for + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + + Attributes: + name (str): + Required. The queue name. 
For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTasksRequest(proto.Message): + r"""Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + response_view (google.cloud.tasks_v2beta3.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta3.Task] resource. + page_size (int): + Maximum page size. + + Fewer tasks than requested might be returned, even if more + tasks exist; use + [next_page_token][google.cloud.tasks.v2beta3.ListTasksResponse.next_page_token] + in the response to determine if more tasks exist. + + The maximum page size is 1000. If unspecified, the page size + will be the maximum. + page_token (str): + A token identifying the page of results to return. + + To request the first page results, page_token must be empty. + To request the next page of results, page_token must be the + value of + [next_page_token][google.cloud.tasks.v2beta3.ListTasksResponse.next_page_token] + returned from the previous call to + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] + method. + + The page token is valid for only 2 hours. 
+ """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + response_view = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + + +class ListTasksResponse(proto.Message): + r"""Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + + Attributes: + tasks (Sequence[google.cloud.tasks_v2beta3.types.Task]): + The list of tasks. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] + with this value as the + [page_token][google.cloud.tasks.v2beta3.ListTasksRequest.page_token]. + + If the next_page_token is empty, there are no more results. + """ + + @property + def raw_page(self): + return self + + tasks = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gct_task.Task, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTaskRequest(proto.Message): + r"""Request message for getting a task using + [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (google.cloud.tasks_v2beta3.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. 
+ + Authorization for + [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta3.Task] resource. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + response_view = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + + +class CreateTaskRequest(proto.Message): + r"""Request message for + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + task (google.cloud.tasks_v2beta3.types.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta3.Task.name]. If a name is + not specified then the system will generate a random unique + task id, which will be set in the task returned in the + [response][google.cloud.tasks.v2beta3.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set it to + the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task de-duplication. + If a task's ID is identical to that of an existing task or a + task that was deleted or executed recently then the call + will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour after + the original task was deleted or executed. If the task's + queue was created using queue.yaml or queue.xml, then + another task with the same name can't be created for ~9days + after the original task was deleted or executed. 
+ + Because there is an extra lookup cost to identify duplicate + task names, these + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id is + recommended. Choosing task ids that are sequential or have + sequential prefixes, for example using a timestamp, causes + an increase in latency and error rates in all task commands. + The infrastructure relies on an approximately uniform + distribution of task ids to store and serve tasks + efficiently. + response_view (google.cloud.tasks_v2beta3.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta3.Task] resource. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + task = proto.Field( + proto.MESSAGE, + number=2, + message=gct_task.Task, + ) + response_view = proto.Field( + proto.ENUM, + number=3, + enum=gct_task.Task.View, + ) + + +class DeleteTaskRequest(proto.Message): + r"""Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. + + Attributes: + name (str): + Required. The task name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class RunTaskRequest(proto.Message): + r"""Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (google.cloud.tasks_v2beta3.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta3.Task] resource. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + response_view = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/queue.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/queue.py new file mode 100644 index 00000000..228b887a --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/queue.py @@ -0,0 +1,556 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.tasks_v2beta3.types import target +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.tasks.v2beta3', + manifest={ + 'Queue', + 'RateLimits', + 'RetryConfig', + 'StackdriverLoggingConfig', + 'QueueStats', + }, +) + + +class Queue(proto.Message): + r"""A queue is a container of related tasks. Queues are + configured to manage how those tasks are dispatched. + Configurable properties include rate limits, retry options, + queue types, and others. + + Attributes: + name (str): + Caller-specified and required in + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue], + after which it becomes output only. + + The queue name. + + The queue name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the queue's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. 
+ app_engine_http_queue (google.cloud.tasks_v2beta3.types.AppEngineHttpQueue): + [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] + settings apply only to [App Engine + tasks][google.cloud.tasks.v2beta3.AppEngineHttpRequest] in + this queue. [Http + tasks][google.cloud.tasks.v2beta3.HttpRequest] are not + affected by this proto. + rate_limits (google.cloud.tasks_v2beta3.types.RateLimits): + Rate limits for task dispatches. + + [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits] + and + [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] + are related because they both control task attempts. However + they control task attempts in different ways: + + - [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits] + controls the total rate of dispatches from a queue (i.e. + all traffic dispatched from the queue, regardless of + whether the dispatch is from a first attempt or a retry). + - [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] + controls what happens to particular a task after its + first attempt fails. That is, + [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] + controls task retries (the second attempt, third attempt, + etc). + + The queue's actual dispatch rate is the result of: + + - Number of tasks in the queue + - User-specified throttling: + [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits], + [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config], + and the [queue's + state][google.cloud.tasks.v2beta3.Queue.state]. + - System throttling due to ``429`` (Too Many Requests) or + ``503`` (Service Unavailable) responses from the worker, + high error rates, or to smooth sudden large traffic + spikes. + retry_config (google.cloud.tasks_v2beta3.types.RetryConfig): + Settings that determine the retry behavior. + + - For tasks created using Cloud Tasks: the queue-level + retry settings apply to all tasks in the queue that were + created using Cloud Tasks. 
Retry settings cannot be set + on individual tasks. + - For tasks created using the App Engine SDK: the + queue-level retry settings apply to all tasks in the + queue which do not have retry settings explicitly set on + the task and were created by the App Engine SDK. See `App + Engine + documentation `__. + state (google.cloud.tasks_v2beta3.types.Queue.State): + Output only. The state of the queue. + + ``state`` can only be changed by calling + [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue], + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue], + or uploading + `queue.yaml/xml `__. + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] + cannot be used to change ``state``. + purge_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last time this queue was purged. + + All tasks that were + [created][google.cloud.tasks.v2beta3.Task.create_time] + before this time were purged. + + A queue can be purged using + [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue], + the `App Engine Task Queue SDK, or the Cloud + Console `__. + + Purge time will be truncated to the nearest microsecond. + Purge time will be unset if the queue has never been purged. + task_ttl (google.protobuf.duration_pb2.Duration): + The maximum amount of time that a task will be retained in + this queue. + + Queues created by Cloud Tasks have a default ``task_ttl`` of + 31 days. After a task has lived for ``task_ttl``, the task + will be deleted regardless of whether it was dispatched or + not. + + The ``task_ttl`` for queues created via queue.yaml/xml is + equal to the maximum duration because there is a `storage + quota `__ + for these queues. To view the maximum valid duration, see + the documentation for [Duration][google.protobuf.Duration]. + tombstone_ttl (google.protobuf.duration_pb2.Duration): + The task tombstone time to live (TTL). 
+ + After a task is deleted or executed, the task's tombstone is + retained for the length of time specified by + ``tombstone_ttl``. The tombstone is used by task + de-duplication; another task with the same name can't be + created until the tombstone has expired. For more + information about task de-duplication, see the documentation + for + [CreateTaskRequest][google.cloud.tasks.v2beta3.CreateTaskRequest.task]. + + Queues created by Cloud Tasks have a default + ``tombstone_ttl`` of 1 hour. + stackdriver_logging_config (google.cloud.tasks_v2beta3.types.StackdriverLoggingConfig): + Configuration options for writing logs to `Stackdriver + Logging `__. If this + field is unset, then no logs are written. + type_ (google.cloud.tasks_v2beta3.types.Queue.Type): + Immutable. The type of a queue (push or pull). + + ``Queue.type`` is an immutable property of the queue that is + set at the queue creation time. When left unspecified, the + default value of ``PUSH`` is selected. + stats (google.cloud.tasks_v2beta3.types.QueueStats): + Output only. The realtime, informational + statistics for a queue. In order to receive the + statistics the caller should include this field + in the FieldMask. 
+ """ + class State(proto.Enum): + r"""State of the queue.""" + STATE_UNSPECIFIED = 0 + RUNNING = 1 + PAUSED = 2 + DISABLED = 3 + + class Type(proto.Enum): + r"""The type of the queue.""" + TYPE_UNSPECIFIED = 0 + PULL = 1 + PUSH = 2 + + name = proto.Field( + proto.STRING, + number=1, + ) + app_engine_http_queue = proto.Field( + proto.MESSAGE, + number=3, + oneof='queue_type', + message=target.AppEngineHttpQueue, + ) + rate_limits = proto.Field( + proto.MESSAGE, + number=4, + message='RateLimits', + ) + retry_config = proto.Field( + proto.MESSAGE, + number=5, + message='RetryConfig', + ) + state = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + purge_time = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + task_ttl = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + tombstone_ttl = proto.Field( + proto.MESSAGE, + number=9, + message=duration_pb2.Duration, + ) + stackdriver_logging_config = proto.Field( + proto.MESSAGE, + number=10, + message='StackdriverLoggingConfig', + ) + type_ = proto.Field( + proto.ENUM, + number=11, + enum=Type, + ) + stats = proto.Field( + proto.MESSAGE, + number=12, + message='QueueStats', + ) + + +class RateLimits(proto.Message): + r"""Rate limits. + + This message determines the maximum rate that tasks can be + dispatched by a queue, regardless of whether the dispatch is a first + task attempt or a retry. + + Note: The debugging command, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask], will run a + task even if the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits]. + + Attributes: + max_dispatches_per_second (float): + The maximum rate at which tasks are dispatched from this + queue. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + - For [App Engine + queues][google.cloud.tasks.v2beta3.AppEngineHttpQueue], + the maximum allowed value is 500. 
+ + This field has the same meaning as `rate in + queue.yaml/xml `__. + max_burst_size (int): + The max burst size. + + Max burst size limits how fast tasks in queue are processed + when many tasks are in the queue and the rate is high. This + field allows the queue to have a high rate so processing + starts shortly after a task is enqueued, but still limits + resource usage when many tasks are enqueued in a short + period of time. + + The `token + bucket `__ + algorithm is used to control the rate of task dispatches. + Each queue has a token bucket that holds tokens, up to the + maximum specified by ``max_burst_size``. Each time a task is + dispatched, a token is removed from the bucket. Tasks will + be dispatched until the queue's bucket runs out of tokens. + The bucket will be continuously refilled with new tokens + based on + [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. + + The default value of ``max_burst_size`` is picked by Cloud + Tasks based on the value of + [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. + + The maximum value of ``max_burst_size`` is 500. + + For App Engine queues that were created or updated using + ``queue.yaml/xml``, ``max_burst_size`` is equal to + `bucket_size `__. + If + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] + is called on a queue without explicitly setting a value for + ``max_burst_size``, ``max_burst_size`` value will get + updated if + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] + is updating + [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. + max_concurrent_dispatches (int): + The maximum number of concurrent tasks that Cloud Tasks + allows to be dispatched for this queue. After this threshold + has been reached, Cloud Tasks stops dispatching tasks until + the number of concurrent requests decreases. 
+ + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + The maximum allowed value is 5,000. + + This field has the same meaning as `max_concurrent_requests + in + queue.yaml/xml `__. + """ + + max_dispatches_per_second = proto.Field( + proto.DOUBLE, + number=1, + ) + max_burst_size = proto.Field( + proto.INT32, + number=2, + ) + max_concurrent_dispatches = proto.Field( + proto.INT32, + number=3, + ) + + +class RetryConfig(proto.Message): + r"""Retry config. + These settings determine when a failed task attempt is retried. + + Attributes: + max_attempts (int): + Number of attempts per task. + + Cloud Tasks will attempt the task ``max_attempts`` times + (that is, if the first attempt fails, then there will be + ``max_attempts - 1`` retries). Must be >= -1. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + -1 indicates unlimited attempts. + + This field has the same meaning as `task_retry_limit in + queue.yaml/xml `__. + max_retry_duration (google.protobuf.duration_pb2.Duration): + If positive, ``max_retry_duration`` specifies the time limit + for retrying a failed task, measured from when the task was + first attempted. Once ``max_retry_duration`` time has passed + *and* the task has been attempted + [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] + times, no further attempts will be made and the task will be + deleted. + + If zero, then the task age is unlimited. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``max_retry_duration`` will be truncated to the nearest + second. + + This field has the same meaning as `task_age_limit in + queue.yaml/xml `__. 
+ min_backoff (google.protobuf.duration_pb2.Duration): + A task will be + [scheduled][google.cloud.tasks.v2beta3.Task.schedule_time] + for retry between + [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig] + specifies that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``min_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `min_backoff_seconds in + queue.yaml/xml `__. + max_backoff (google.protobuf.duration_pb2.Duration): + A task will be + [scheduled][google.cloud.tasks.v2beta3.Task.schedule_time] + for retry between + [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig] + specifies that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``max_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `max_backoff_seconds in + queue.yaml/xml `__. + max_doublings (int): + The time between retries will double ``max_doublings`` + times. + + A task's retry interval starts at + [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff], + then doubles ``max_doublings`` times, then increases + linearly, and finally retries at intervals of + [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] + up to + [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] + times. 
+ + For example, if + [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] + is 10s, + [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] + is 300s, and ``max_doublings`` is 3, then the task will + first be retried in 10s. The retry interval will double + three times, and then increase linearly by 2^3 \* 10s. + Finally, the task will retry at intervals of + [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] + until the task has been attempted + [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] + times. Thus, the requests will retry at 10s, 20s, 40s, 80s, + 160s, 240s, 300s, 300s, .... + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + This field has the same meaning as `max_doublings in + queue.yaml/xml `__. + """ + + max_attempts = proto.Field( + proto.INT32, + number=1, + ) + max_retry_duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + min_backoff = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + max_backoff = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + max_doublings = proto.Field( + proto.INT32, + number=5, + ) + + +class StackdriverLoggingConfig(proto.Message): + r"""Configuration options for writing logs to `Stackdriver + Logging `__. + + Attributes: + sampling_ratio (float): + Specifies the fraction of operations to write to + `Stackdriver + Logging `__. This + field may contain any value between 0.0 and 1.0, inclusive. + 0.0 is the default and means that no operations are logged. + """ + + sampling_ratio = proto.Field( + proto.DOUBLE, + number=1, + ) + + +class QueueStats(proto.Message): + r"""Statistics for a queue. + Attributes: + tasks_count (int): + Output only. 
An estimation of the number of + tasks in the queue, that is, the tasks in the + queue that haven't been executed, the tasks in + the queue which the queue has dispatched but has + not yet received a reply for, and the failed + tasks that the queue is retrying. + oldest_estimated_arrival_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. An estimation of the nearest + time in the future where a task in the queue is + scheduled to be executed. + executed_last_minute_count (int): + Output only. The number of tasks that the + queue has dispatched and received a reply for + during the last minute. This variable counts + both successful and non-successful executions. + concurrent_dispatches_count (int): + Output only. The number of requests that the + queue has dispatched but has not received a + reply for yet. + effective_execution_rate (float): + Output only. The current maximum number of + tasks per second executed by the queue. The + maximum value of this variable is controlled by + the RateLimits of the Queue. However, this value + could be less to avoid overloading the endpoints + tasks in the queue are targeting. 
+ """ + + tasks_count = proto.Field( + proto.INT64, + number=1, + ) + oldest_estimated_arrival_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + executed_last_minute_count = proto.Field( + proto.INT64, + number=3, + ) + concurrent_dispatches_count = proto.Field( + proto.INT64, + number=4, + ) + effective_execution_rate = proto.Field( + proto.DOUBLE, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/target.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/target.py new file mode 100644 index 00000000..09b67def --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/target.py @@ -0,0 +1,620 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.tasks.v2beta3', + manifest={ + 'HttpMethod', + 'PullMessage', + 'HttpRequest', + 'AppEngineHttpQueue', + 'AppEngineHttpRequest', + 'AppEngineRouting', + 'OAuthToken', + 'OidcToken', + }, +) + + +class HttpMethod(proto.Enum): + r"""The HTTP method used to execute the task.""" + HTTP_METHOD_UNSPECIFIED = 0 + POST = 1 + GET = 2 + HEAD = 3 + PUT = 4 + DELETE = 5 + PATCH = 6 + OPTIONS = 7 + + +class PullMessage(proto.Message): + r"""Pull Message. 
+ + This proto can only be used for tasks in a queue which has + [PULL][google.cloud.tasks.v2beta3.Queue.type] type. It currently + exists for backwards compatibility with the App Engine Task Queue + SDK. This message type may be returned with methods + [list][google.cloud.tasks.v2beta3.CloudTask.ListTasks] and + [get][google.cloud.tasks.v2beta3.CloudTask.ListTasks], when the + response view is [FULL][google.cloud.tasks.v2beta3.Task.View.Full]. + + Attributes: + payload (bytes): + A data payload consumed by the worker to + execute the task. + tag (str): + The task's tag. + + The tag is less than 500 characters. + + SDK compatibility: Although the SDK allows tags to be either + string or + `bytes `__, + only UTF-8 encoded tags can be used in Cloud Tasks. If a tag + isn't UTF-8 encoded, the tag will be empty when the task is + returned by Cloud Tasks. + """ + + payload = proto.Field( + proto.BYTES, + number=1, + ) + tag = proto.Field( + proto.STRING, + number=2, + ) + + +class HttpRequest(proto.Message): + r"""HTTP request. + + The task will be pushed to the worker as an HTTP request. If the + worker or the redirected worker acknowledges the task by returning a + successful HTTP response code ([``200`` - ``299``]), the task will + be removed from the queue. If any other HTTP response code is + returned or no response is received, the task will be retried + according to the following: + + - User-specified throttling: [retry + configuration][google.cloud.tasks.v2beta3.Queue.retry_config], + [rate limits][google.cloud.tasks.v2beta3.Queue.rate_limits], and + the [queue's state][google.cloud.tasks.v2beta3.Queue.state]. + + - System throttling: To prevent the worker from overloading, Cloud + Tasks may temporarily reduce the queue's effective rate. + User-specified settings will not be changed. + + System throttling happens because: + + - Cloud Tasks backs off on all errors. 
Normally the backoff + specified in [rate + limits][google.cloud.tasks.v2beta3.Queue.rate_limits] will be + used. But if the worker returns ``429`` (Too Many Requests), + ``503`` (Service Unavailable), or the rate of errors is high, + Cloud Tasks will use a higher backoff rate. The retry specified + in the ``Retry-After`` HTTP response header is considered. + + - To prevent traffic spikes and to smooth sudden increases in + traffic, dispatches ramp up slowly when the queue is newly + created or idle and if large numbers of tasks suddenly become + available to dispatch (due to spikes in create task rates, the + queue being unpaused, or many tasks that are scheduled at the + same time). + + Attributes: + url (str): + Required. The full url path that the request will be sent + to. + + This string must begin with either "http://" or "https://". + Some examples are: ``http://acme.com`` and + ``https://acme.com/sales:8080``. Cloud Tasks will encode + some characters for safety and compatibility. The maximum + allowed URL length is 2083 characters after encoding. + + The ``Location`` header response from a redirect response + [``300`` - ``399``] may be followed. The redirect is not + counted as a separate attempt. + http_method (google.cloud.tasks_v2beta3.types.HttpMethod): + The HTTP method to use for the request. The + default is POST. + headers (Sequence[google.cloud.tasks_v2beta3.types.HttpRequest.HeadersEntry]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + + These headers represent a subset of the headers that will + accompany the task's HTTP request. Some HTTP request headers + will be ignored or replaced. + + A partial list of headers that will be ignored or replaced + is: + + - Host: This will be computed by Cloud Tasks and derived + from + [HttpRequest.url][google.cloud.tasks.v2beta3.HttpRequest.url]. 
+ - Content-Length: This will be computed by Cloud Tasks. + - User-Agent: This will be set to ``"Google-Cloud-Tasks"``. + - X-Google-*: Google use only. + - X-AppEngine-*: Google use only. + + ``Content-Type`` won't be set by Cloud Tasks. You can + explicitly set ``Content-Type`` to a media type when the + [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/octet-stream"`` or ``"application/json"``. + + Headers which can have multiple values (according to + RFC2616) can be specified using comma-separated values. + + The size of the headers must be less than 80KB. + body (bytes): + HTTP request body. + + A request body is allowed only if the [HTTP + method][google.cloud.tasks.v2beta3.HttpRequest.http_method] + is POST, PUT, or PATCH. It is an error to set body on a task + with an incompatible + [HttpMethod][google.cloud.tasks.v2beta3.HttpMethod]. + oauth_token (google.cloud.tasks_v2beta3.types.OAuthToken): + If specified, an `OAuth + token `__ + will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization should generally only be used + when calling Google APIs hosted on \*.googleapis.com. + oidc_token (google.cloud.tasks_v2beta3.types.OidcToken): + If specified, an + `OIDC `__ + token will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend + to validate the token yourself. 
+ """ + + url = proto.Field( + proto.STRING, + number=1, + ) + http_method = proto.Field( + proto.ENUM, + number=2, + enum='HttpMethod', + ) + headers = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + body = proto.Field( + proto.BYTES, + number=4, + ) + oauth_token = proto.Field( + proto.MESSAGE, + number=5, + oneof='authorization_header', + message='OAuthToken', + ) + oidc_token = proto.Field( + proto.MESSAGE, + number=6, + oneof='authorization_header', + message='OidcToken', + ) + + +class AppEngineHttpQueue(proto.Message): + r"""App Engine HTTP queue. + + The task will be delivered to the App Engine application hostname + specified by its + [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] + and + [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest]. + The documentation for + [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] + explains how the task's host URL is constructed. + + Using + [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + Attributes: + app_engine_routing_override (google.cloud.tasks_v2beta3.types.AppEngineRouting): + Overrides for the [task-level + app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. + + If set, ``app_engine_routing_override`` is used for all + tasks in the queue, no matter what the setting is for the + [task-level + app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. + """ + + app_engine_routing_override = proto.Field( + proto.MESSAGE, + number=1, + message='AppEngineRouting', + ) + + +class AppEngineHttpRequest(proto.Message): + r"""App Engine HTTP request. + + The message defines the HTTP request that is sent to an App Engine + app when the task is dispatched. 
+ + Using + [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + The task will be delivered to the App Engine app which belongs to + the same project as the queue. For more information, see `How + Requests are + Routed `__ + and how routing is affected by `dispatch + files `__. + Traffic is encrypted during transport and never leaves Google + datacenters. Because this traffic is carried over a communication + mechanism internal to Google, you cannot explicitly set the protocol + (for example, HTTP or HTTPS). The request to the handler, however, + will appear to have used the HTTP protocol. + + The [AppEngineRouting][google.cloud.tasks.v2beta3.AppEngineRouting] + used to construct the URL that the task is delivered to can be set + at the queue-level or task-level: + + - If set, + [app_engine_routing_override][google.cloud.tasks.v2beta3.AppEngineHttpQueue.app_engine_routing_override] + is used for all tasks in the queue, no matter what the setting is + for the [task-level + app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. + + The ``url`` that the task will be sent to is: + + - ``url =`` + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] ``+`` + [relative_uri][google.cloud.tasks.v2beta3.AppEngineHttpRequest.relative_uri] + + Tasks can be dispatched to secure app handlers, unsecure app + handlers, and URIs restricted with + ```login: admin`` `__. + Because tasks are not run as any user, they cannot be dispatched to + URIs restricted with + ```login: required`` `__ + Task dispatches also do not follow redirects. + + The task attempt has succeeded if the app's request handler returns + an HTTP response code in the range [``200`` - ``299``]. 
The task + attempt has failed if the app's handler returns a non-2xx response + code or Cloud Tasks does not receive response before the + [deadline][google.cloud.tasks.v2beta3.Task.dispatch_deadline]. + Failed tasks will be retried according to the [retry + configuration][google.cloud.tasks.v2beta3.Queue.retry_config]. + ``503`` (Service Unavailable) is considered an App Engine system + error instead of an application error and will cause Cloud Tasks' + traffic congestion control to temporarily throttle the queue's + dispatches. Unlike other types of task targets, a ``429`` (Too Many + Requests) response from an app handler does not cause traffic + congestion control to throttle the queue. + + Attributes: + http_method (google.cloud.tasks_v2beta3.types.HttpMethod): + The HTTP method to use for the request. The default is POST. + + The app's request handler for the task's target URL must be + able to handle HTTP requests with this http_method, + otherwise the task attempt fails with error code 405 (Method + Not Allowed). See `Writing a push task request + handler `__ + and the App Engine documentation for your runtime on `How + Requests are + Handled `__. + app_engine_routing (google.cloud.tasks_v2beta3.types.AppEngineRouting): + Task-level setting for App Engine routing. + + If set, + [app_engine_routing_override][google.cloud.tasks.v2beta3.AppEngineHttpQueue.app_engine_routing_override] + is used for all tasks in the queue, no matter what the + setting is for the [task-level + app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. + relative_uri (str): + The relative URI. + The relative URI must begin with "/" and must be + a valid HTTP relative URI. It can contain a path + and query string arguments. If the relative URI + is empty, then the root path "/" will be used. + No spaces are allowed, and the maximum length + allowed is 2083 characters. 
+ headers (Sequence[google.cloud.tasks_v2beta3.types.AppEngineHttpRequest.HeadersEntry]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + Repeated headers are not supported but a header value can + contain commas. + + Cloud Tasks sets some headers to default values: + + - ``User-Agent``: By default, this header is + ``"AppEngine-Google; (+http://code.google.com/appengine)"``. + This header can be modified, but Cloud Tasks will append + ``"AppEngine-Google; (+http://code.google.com/appengine)"`` + to the modified ``User-Agent``. + + If the task has a + [body][google.cloud.tasks.v2beta3.AppEngineHttpRequest.body], + Cloud Tasks sets the following headers: + + - ``Content-Type``: By default, the ``Content-Type`` header + is set to ``"application/octet-stream"``. The default can + be overridden by explicitly setting ``Content-Type`` to a + particular media type when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/json"``. + - ``Content-Length``: This is computed by Cloud Tasks. This + value is output only. It cannot be changed. + + The headers below cannot be set or overridden: + + - ``Host`` + - ``X-Google-*`` + - ``X-AppEngine-*`` + + In addition, Cloud Tasks sets some headers when the task is + dispatched, such as headers containing information about the + task; see `request + headers `__. + These headers are set only when the task is dispatched, so + they are not visible when the task is returned in a Cloud + Tasks response. + + Although there is no specific limit for the maximum number + of headers or the size, there is a limit on the maximum size + of the [Task][google.cloud.tasks.v2beta3.Task]. For more + information, see the + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] + documentation. 
+ body (bytes): + HTTP request body. + + A request body is allowed only if the HTTP method is POST or + PUT. It is an error to set a body on a task with an + incompatible + [HttpMethod][google.cloud.tasks.v2beta3.HttpMethod]. + """ + + http_method = proto.Field( + proto.ENUM, + number=1, + enum='HttpMethod', + ) + app_engine_routing = proto.Field( + proto.MESSAGE, + number=2, + message='AppEngineRouting', + ) + relative_uri = proto.Field( + proto.STRING, + number=3, + ) + headers = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + body = proto.Field( + proto.BYTES, + number=5, + ) + + +class AppEngineRouting(proto.Message): + r"""App Engine Routing. + + Defines routing characteristics specific to App Engine - service, + version, and instance. + + For more information about services, versions, and instances see `An + Overview of App + Engine `__, + `Microservices Architecture on Google App + Engine `__, + `App Engine Standard request + routing `__, + and `App Engine Flex request + routing `__. + + Attributes: + service (str): + App service. + + By default, the task is sent to the service which is the + default service when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is + not parsable into + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. 
If + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is + not parsable, then + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance] + are the empty string. + version (str): + App version. + + By default, the task is sent to the version which is the + default version when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is + not parsable into + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is + not parsable, then + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance] + are the empty string. + instance (str): + App instance. + + By default, the task is sent to an instance which is + available when the task is attempted. + + Requests can only be sent to a specific instance if `manual + scaling is used in App Engine + Standard `__. + App Engine Flex does not support instances. For more + information, see `App Engine Standard request + routing `__ + and `App Engine Flex request + routing `__. + host (str): + Output only. The host that the task is sent to. 
+ + The host is constructed from the domain name of the app + associated with the queue's project ID (for example + .appspot.com), and the + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. + Tasks which were created using the App Engine SDK might have + a custom domain name. + + For more information, see `How Requests are + Routed `__. + """ + + service = proto.Field( + proto.STRING, + number=1, + ) + version = proto.Field( + proto.STRING, + number=2, + ) + instance = proto.Field( + proto.STRING, + number=3, + ) + host = proto.Field( + proto.STRING, + number=4, + ) + + +class OAuthToken(proto.Message): + r"""Contains information needed for generating an `OAuth + token `__. + This type of authorization should generally only be used when + calling Google APIs hosted on \*.googleapis.com. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OAuth token. The service account + must be within the same project as the queue. The caller + must have iam.serviceAccounts.actAs permission for the + service account. + scope (str): + OAuth scope to be used for generating OAuth + access token. If not specified, + "https://www.googleapis.com/auth/cloud-platform" + will be used. + """ + + service_account_email = proto.Field( + proto.STRING, + number=1, + ) + scope = proto.Field( + proto.STRING, + number=2, + ) + + +class OidcToken(proto.Message): + r"""Contains information needed for generating an `OpenID Connect + token `__. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the + token yourself. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OIDC token. The service account + must be within the same project as the queue. 
The caller + must have iam.serviceAccounts.actAs permission for the + service account. + audience (str): + Audience to be used when generating OIDC + token. If not specified, the URI specified in + target will be used. + """ + + service_account_email = proto.Field( + proto.STRING, + number=1, + ) + audience = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/task.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/task.py new file mode 100644 index 00000000..66293416 --- /dev/null +++ b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/task.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.tasks_v2beta3.types import target +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.tasks.v2beta3', + manifest={ + 'Task', + 'Attempt', + }, +) + + +class Task(proto.Message): + r"""A unit of scheduled work. + Attributes: + name (str): + Optionally caller-specified in + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + + The task name. 
class Task(proto.Message):
    r"""A unit of scheduled work.

    Attributes:
        name (str):
            Optionally caller-specified in
            [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask].
            The task name, in the format
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``.
        app_engine_http_request (google.cloud.tasks_v2beta3.types.AppEngineHttpRequest):
            HTTP request that is sent to the App Engine app handler. An
            App Engine task is a task that has
            [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest]
            set.
        http_request (google.cloud.tasks_v2beta3.types.HttpRequest):
            HTTP request that is sent to the task's target. An HTTP
            task is a task that has
            [HttpRequest][google.cloud.tasks.v2beta3.HttpRequest] set.
        pull_message (google.cloud.tasks_v2beta3.types.PullMessage):
            Pull Message contained in a task in a
            [PULL][google.cloud.tasks.v2beta3.Queue.type] queue type.
            Cannot be set explicitly through the Cloud Tasks API; exists
            for backward compatibility with App Engine Task Queue pull
            queues.
        schedule_time (google.protobuf.timestamp_pb2.Timestamp):
            The time when the task is scheduled to be attempted.
            Truncated to the nearest microsecond.
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The time that the task was created. Truncated
            to the nearest second.
        dispatch_deadline (google.protobuf.duration_pb2.Duration):
            The deadline for requests sent to the worker; past it the
            attempt is marked ``DEADLINE_EXCEEDED`` and retried per the
            queue's [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig].
            Truncated to the nearest millisecond.
        dispatch_count (int):
            Output only. The number of attempts dispatched, including
            attempts that have not yet received a response.
        response_count (int):
            Output only. The number of attempts which have received a
            response.
        first_attempt (google.cloud.tasks_v2beta3.types.Attempt):
            Output only. The status of the task's first attempt; only
            [dispatch_time][google.cloud.tasks.v2beta3.Attempt.dispatch_time]
            is retained.
        last_attempt (google.cloud.tasks_v2beta3.types.Attempt):
            Output only. The status of the task's last attempt.
        view (google.cloud.tasks_v2beta3.types.Task.View):
            Output only. The view specifying which subset of the
            [Task][google.cloud.tasks.v2beta3.Task] has been returned.
    """

    class View(proto.Enum):
        r"""The subset of [Task][google.cloud.tasks.v2beta3.Task] data
        returned in a response; large or sensitive fields (payloads) are
        only returned when explicitly requested.
        """
        VIEW_UNSPECIFIED = 0
        BASIC = 1
        FULL = 2

    name = proto.Field(proto.STRING, number=1)
    # The three payload fields below are mutually exclusive members of the
    # 'payload_type' oneof; field numbers must not change.
    app_engine_http_request = proto.Field(
        proto.MESSAGE,
        number=3,
        oneof='payload_type',
        message=target.AppEngineHttpRequest,
    )
    http_request = proto.Field(
        proto.MESSAGE,
        number=11,
        oneof='payload_type',
        message=target.HttpRequest,
    )
    pull_message = proto.Field(
        proto.MESSAGE,
        number=13,
        oneof='payload_type',
        message=target.PullMessage,
    )
    schedule_time = proto.Field(
        proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp)
    create_time = proto.Field(
        proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp)
    dispatch_deadline = proto.Field(
        proto.MESSAGE, number=12, message=duration_pb2.Duration)
    dispatch_count = proto.Field(proto.INT32, number=6)
    response_count = proto.Field(proto.INT32, number=7)
    first_attempt = proto.Field(proto.MESSAGE, number=8, message='Attempt')
    last_attempt = proto.Field(proto.MESSAGE, number=9, message='Attempt')
    view = proto.Field(proto.ENUM, number=10, enum=View)


class Attempt(proto.Message):
    r"""The status of a task attempt.

    Attributes:
        schedule_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The time that this attempt was scheduled.
            Truncated to the nearest microsecond.
        dispatch_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The time that this attempt was dispatched.
            Truncated to the nearest microsecond.
        response_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The time that this attempt response was
            received. Truncated to the nearest microsecond.
        response_status (google.rpc.status_pb2.Status):
            Output only. The response from the worker for this attempt.
            If ``response_time`` is unset, the task has not been
            attempted or is currently running and this field is
            meaningless.
    """

    schedule_time = proto.Field(
        proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp)
    dispatch_time = proto.Field(
        proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp)
    response_time = proto.Field(
        proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp)
    response_status = proto.Field(
        proto.MESSAGE, number=4, message=status_pb2.Status)


__all__ = tuple(sorted(__protobuf__.manifest))
# (patch context — preserved from the surrounding diff, non-Python)
# diff --git a/owl-bot-staging/v2beta3/mypy.ini (new file, 3 lines):
#     [mypy]
#     python_version = 3.6
#     namespace_packages = True
# diff --git a/owl-bot-staging/v2beta3/noxfile.py (new file, 132 lines;
# begins with the standard Apache-2.0 license header, unchanged).
#
import os
import pathlib
import shutil
import subprocess
import sys


import nox  # type: ignore

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
# Package name as reported by `setup.py --name` (note: includes a trailing
# newline, which the lower-bound-checker tolerates).
PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")


# BUG FIX (two related defects):
#  1. The original list was missing a comma after "check_lower_bounds", so
#     Python's implicit string-literal concatenation produced the single
#     entry "check_lower_boundsdocs" — both the check_lower_bounds and docs
#     sessions were silently dropped from the default run.
#  2. `nox.sessions = [...]` merely sets an unused module attribute; the
#     supported knob for the default session list is `nox.options.sessions`.
nox.options.sessions = [
    "unit",
    "cover",
    "mypy",
    "check_lower_bounds",
    # exclude update_lower_bounds from default
    "docs",
]


@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
def unit(session):
    """Run the unit test suite under each supported interpreter."""
    session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
    session.install('-e', '.')

    session.run(
        'py.test',
        '--quiet',
        '--cov=google/cloud/tasks_v2beta3/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        os.path.join('tests', 'unit', ''.join(session.posargs))
    )


@nox.session(python='3.7')
def cover(session):
    """Run the final coverage report.

    This outputs the coverage report aggregating coverage from the unit
    test runs (not system test runs), and then erases coverage data.
    """
    session.install("coverage", "pytest-cov")
    session.run("coverage", "report", "--show-missing", "--fail-under=100")

    session.run("coverage", "erase")


@nox.session(python=['3.6', '3.7'])
def mypy(session):
    """Run the type checker."""
    session.install('mypy', 'types-pkg_resources')
    session.install('.')
    session.run(
        'mypy',
        '--explicit-package-bases',
        'google',
    )


@nox.session
def update_lower_bounds(session):
    """Update lower bounds in constraints.txt to match setup.py."""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'update',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )


@nox.session
def check_lower_bounds(session):
    """Check lower bounds in setup.py are reflected in constraints file."""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'check',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )


@nox.session(python='3.6')
def docs(session):
    """Build the docs for this library."""
    session.install("-e", ".")
    session.install("sphinx<3.0.0", "alabaster", "recommonmark")

    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
    session.run(
        "sphinx-build",
        "-W",  # warnings as errors
        "-T",  # show full traceback on exception
        "-N",  # no colors
        "-b",
        "html",
        "-d",
        os.path.join("docs", "_build", "doctrees", ""),
        os.path.join("docs", ""),
        os.path.join("docs", "_build", "html", ""),
    )
# (patch context — preserved from the surrounding diff, non-Python)
# diff --git a/owl-bot-staging/v2beta3/scripts/fixup_tasks_v2beta3_keywords.py
# new file mode 100644 index 00000000..e9fe202e @@ -0,0 +1,191 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Returns (true_list, false_list): the items for which ``predicate`` is
    truthy, then the items for which it is falsy, each in original order.
    """
    results = ([], [])

    for i in iterator:
        results[int(predicate(i))].append(i)

    # Returns trueList, falseList
    return results[1], results[0]


class tasksCallTransformer(cst.CSTTransformer):
    """Rewrites flattened client method calls into request-dict calls.

    e.g. ``client.get_task(name, retry=r)`` becomes
    ``client.get_task(request={'name': name}, retry=r)``.
    """
    # FIX: ``Tuple[str]`` annotates a 1-tuple; the variadic homogeneous form
    # is ``Tuple[str, ...]`` (typing docs). Runtime behavior is unchanged.
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        'create_queue': ('parent', 'queue', ),
        'create_task': ('parent', 'task', 'response_view', ),
        'delete_queue': ('name', ),
        'delete_task': ('name', ),
        'get_iam_policy': ('resource', 'options', ),
        'get_queue': ('name', 'read_mask', ),
        'get_task': ('name', 'response_view', ),
        'list_queues': ('parent', 'filter', 'page_size', 'page_token', 'read_mask', ),
        'list_tasks': ('parent', 'response_view', 'page_size', 'page_token', ),
        'pause_queue': ('name', ),
        'purge_queue': ('name', ),
        'resume_queue': ('name', ),
        'run_task': ('name', 'response_view', ),
        'set_iam_policy': ('resource', 'policy', ),
        'test_iam_permissions': ('resource', 'permissions', ),
        'update_queue': ('queue', 'update_mask', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Repack positional/keyword args of known API methods into a request dict."""
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        kwargs, ctrl_kwargs = partition(
            # FIX: idiomatic ``x not in y`` instead of ``not x in y`` (PEP 8);
            # identical semantics.
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the method's flattened params must be the
        # control params (retry/timeout/metadata) passed positionally.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    # NOTE(review): a default instance is shared across calls; this is safe
    # only because tasksCallTransformer holds no mutable per-run state.
    transformer=tasksCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        with open(fpath, 'r') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w') as f:
            f.write(updated.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the tasks client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
# (patch context — preserved from the surrounding diff, non-Python)
# diff --git a/owl-bot-staging/v2beta3/setup.py new file mode 100644
# index 00000000..2254eec1 @@ -0,0 +1,53 @@
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import io
import os

import setuptools  # type: ignore

version = '0.1.0'

package_root = os.path.abspath(os.path.dirname(__file__))

# Use the README as the long description shown on PyPI.
readme_filename = os.path.join(package_root, 'README.rst')
with io.open(readme_filename, encoding='utf-8') as readme_file:
    readme = readme_file.read()

# Runtime dependencies; lower bounds are verified against constraints.txt by
# the check_lower_bounds nox session.
_DEPENDENCIES = (
    'google-api-core[grpc] >= 1.27.0, < 2.0.0dev',
    'libcst >= 0.2.5',
    'proto-plus >= 1.15.0',
    'packaging >= 14.3',
    'grpc-google-iam-v1 >= 0.12.3, < 0.13dev',
)

setuptools.setup(
    name='google-cloud-tasks',
    version=version,
    long_description=readme,
    packages=setuptools.PEP420PackageFinder.find(),
    namespace_packages=('google', 'google.cloud'),
    platforms='Posix; MacOS X; Windows',
    include_package_data=True,
    install_requires=_DEPENDENCIES,
    python_requires='>=3.6',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Topic :: Internet',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    zip_safe=False,
)
# (patch context — preserved from the surrounding diff, non-Python)
# diff --git a/owl-bot-staging/v2beta3/tests/__init__.py new file mode 100644
# index 00000000..b54a5fcc @@ -0,0 +1,16 @@
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# diff --git a/owl-bot-staging/v2beta3/tests/unit/__init__.py b/owl-bot-staging/v2beta3/tests/unit/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2beta3/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2beta3/tests/unit/gapic/__init__.py b/owl-bot-staging/v2beta3/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2beta3/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/__init__.py b/owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py b/owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py new file mode 100644 index 00000000..fa0bc47b --- /dev/null +++ b/owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py @@ -0,0 +1,5211 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksAsyncClient +from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksClient +from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta3.services.cloud_tasks import transports +from google.cloud.tasks_v2beta3.services.cloud_tasks.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import target +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete 
these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudTasksClient._get_default_mtls_endpoint(None) is None + assert CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + CloudTasksClient, + CloudTasksAsyncClient, +]) +def test_cloud_tasks_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 
'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'cloudtasks.googleapis.com:443' + + +@pytest.mark.parametrize("client_class", [ + CloudTasksClient, + CloudTasksAsyncClient, +]) +def test_cloud_tasks_client_service_account_always_use_jwt(client_class): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + +@pytest.mark.parametrize("client_class", [ + CloudTasksClient, + CloudTasksAsyncClient, +]) +def test_cloud_tasks_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'cloudtasks.googleapis.com:443' + + +def test_cloud_tasks_client_get_transport_class(): + transport = CloudTasksClient.get_transport_class() + available_transports = [ + transports.CloudTasksGrpcTransport, + ] + assert transport in available_transports + + transport = CloudTasksClient.get_transport_class("grpc") + assert transport == transports.CloudTasksGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + (CloudTasksAsyncClient, 
transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)) +@mock.patch.object(CloudTasksAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksAsyncClient)) +def test_cloud_tasks_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudTasksClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudTasksClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "true"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "false"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)) +@mock.patch.object(CloudTasksAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_tasks_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_cloud_tasks_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_cloud_tasks_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_cloud_tasks_client_client_options_from_dict(): + with mock.patch('google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = CloudTasksClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_queues(transport: str = 'grpc', request_type=cloudtasks.ListQueuesRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_queues_from_dict(): + test_list_queues(request_type=dict) + + +def test_list_queues_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + client.list_queues() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + +@pytest.mark.asyncio +async def test_list_queues_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ListQueuesRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_queues_async_from_dict(): + await test_list_queues_async(request_type=dict) + + +def test_list_queues_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + call.return_value = cloudtasks.ListQueuesResponse() + client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_queues_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse()) + await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_queues_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_queues( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_queues_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_queues( + cloudtasks.ListQueuesRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_queues_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.list_queues),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # NOTE(review): dropped a redundant plain ListQueuesResponse()
+        # assignment that was immediately overwritten by the awaitable fake.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_queues(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_queues_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_queues(
+            cloudtasks.ListQueuesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_queues_pager():
+    # NOTE(review): instantiate AnonymousCredentials() — the bare class
+    # object was previously passed where a credentials instance is expected.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_queues),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+                next_page_token='abc',
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[],
+                next_page_token='def',
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                ],
+                next_page_token='ghi',
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_queues(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, queue.Queue)
+                   for i in results)
+
+def test_list_queues_pages():
+    # NOTE(review): instantiate AnonymousCredentials() — the bare class
+    # object was previously passed where a credentials instance is expected.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_queues),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+                next_page_token='abc',
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[],
+                next_page_token='def',
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                ],
+                next_page_token='ghi',
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_queues(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_queues_async_pager():
+    # NOTE(review): instantiate AnonymousCredentials() — the bare class
+    # object was previously passed where a credentials instance is expected.
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_queues),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+                next_page_token='abc',
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[],
+                next_page_token='def',
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                ],
+                next_page_token='ghi',
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_queues(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, queue.Queue)
+                   for i in responses)
+
+@pytest.mark.asyncio
+async def test_list_queues_async_pages():
+    # NOTE(review): instantiate AnonymousCredentials() — the bare class
+    # object was previously passed where a credentials instance is expected.
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_queues),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token='abc', + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token='def', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token='ghi', + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_queues(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_queue(transport: str = 'grpc', request_type=cloudtasks.GetQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + app_engine_http_queue=target.AppEngineHttpQueue(app_engine_routing_override=target.AppEngineRouting(service='service_value')), + ) + response = client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +def test_get_queue_from_dict(): + test_get_queue(request_type=dict) + + +def test_get_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + client.get_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + +@pytest.mark.asyncio +async def test_get_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.GetQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + )) + response = await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +@pytest.mark.asyncio +async def test_get_queue_async_from_dict(): + await test_get_queue_async(request_type=dict) + + +def test_get_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + call.return_value = queue.Queue() + client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+def test_get_queue_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_queue),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = queue.Queue()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_queue(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+def test_get_queue_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_queue(
+            cloudtasks.GetQueueRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_queue_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_queue),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # NOTE(review): dropped a redundant plain queue.Queue() assignment
+        # that was immediately overwritten by the awaitable fake below.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_queue(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_queue( + cloudtasks.GetQueueRequest(), + name='name_value', + ) + + +def test_create_queue(transport: str = 'grpc', request_type=cloudtasks.CreateQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name='name_value', + state=gct_queue.Queue.State.RUNNING, + type_=gct_queue.Queue.Type.PULL, + app_engine_http_queue=target.AppEngineHttpQueue(app_engine_routing_override=target.AppEngineRouting(service='service_value')), + ) + response = client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_queue.Queue) + assert response.name == 'name_value' + assert response.state == gct_queue.Queue.State.RUNNING + assert response.type_ == gct_queue.Queue.Type.PULL + + +def test_create_queue_from_dict(): + test_create_queue(request_type=dict) + + +def test_create_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + client.create_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + +@pytest.mark.asyncio +async def test_create_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CreateQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue( + name='name_value', + state=gct_queue.Queue.State.RUNNING, + type_=gct_queue.Queue.Type.PULL, + )) + response = await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_queue.Queue) + assert response.name == 'name_value' + assert response.state == gct_queue.Queue.State.RUNNING + assert response.type_ == gct_queue.Queue.Type.PULL + + +@pytest.mark.asyncio +async def test_create_queue_async_from_dict(): + await test_create_queue_async(request_type=dict) + + +def test_create_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + call.return_value = gct_queue.Queue() + client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_queue( + parent='parent_value', + queue=gct_queue.Queue(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].queue == gct_queue.Queue(name='name_value') + + +def test_create_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_queue( + cloudtasks.CreateQueueRequest(), + parent='parent_value', + queue=gct_queue.Queue(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_queue), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_queue( + parent='parent_value', + queue=gct_queue.Queue(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].queue == gct_queue.Queue(name='name_value') + + +@pytest.mark.asyncio +async def test_create_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_queue( + cloudtasks.CreateQueueRequest(), + parent='parent_value', + queue=gct_queue.Queue(name='name_value'), + ) + + +def test_update_queue(transport: str = 'grpc', request_type=cloudtasks.UpdateQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gct_queue.Queue( + name='name_value', + state=gct_queue.Queue.State.RUNNING, + type_=gct_queue.Queue.Type.PULL, + app_engine_http_queue=target.AppEngineHttpQueue(app_engine_routing_override=target.AppEngineRouting(service='service_value')), + ) + response = client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == 'name_value' + assert response.state == gct_queue.Queue.State.RUNNING + assert response.type_ == gct_queue.Queue.Type.PULL + + +def test_update_queue_from_dict(): + test_update_queue(request_type=dict) + + +def test_update_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + client.update_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + +@pytest.mark.asyncio +async def test_update_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.UpdateQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.update_queue),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue(
+ name='name_value',
+ state=gct_queue.Queue.State.RUNNING,
+ type_=gct_queue.Queue.Type.PULL,
+ ))
+ response = await client.update_queue(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudtasks.UpdateQueueRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, gct_queue.Queue)
+ assert response.name == 'name_value'
+ assert response.state == gct_queue.Queue.State.RUNNING
+ assert response.type_ == gct_queue.Queue.Type.PULL
+
+
+ @pytest.mark.asyncio
+ async def test_update_queue_async_from_dict():
+ await test_update_queue_async(request_type=dict)
+
+
+ def test_update_queue_field_headers():
+ client = CloudTasksClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloudtasks.UpdateQueueRequest()
+
+ request.queue.name = 'queue.name/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_queue),
+ '__call__') as call:
+ call.return_value = gct_queue.Queue()
+ client.update_queue(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'queue.name=queue.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + + request.queue.name = 'queue.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'queue.name=queue.name/value', + ) in kw['metadata'] + + +def test_update_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_queue( + queue=gct_queue.Queue(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].queue == gct_queue.Queue(name='name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +def test_update_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_queue( + queue=gct_queue.Queue(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].queue == gct_queue.Queue(name='name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_queue(transport: str = 'grpc', request_type=cloudtasks.DeleteQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_from_dict(): + test_delete_queue(request_type=dict) + + +def test_delete_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + client.delete_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + +@pytest.mark.asyncio +async def test_delete_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.DeleteQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_queue_async_from_dict(): + await test_delete_queue_async(request_type=dict) + + +def test_delete_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + call.return_value = None + client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name='name_value', + ) + + +def test_purge_queue(transport: str = 'grpc', request_type=cloudtasks.PurgeQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + app_engine_http_queue=target.AppEngineHttpQueue(app_engine_routing_override=target.AppEngineRouting(service='service_value')), + ) + response = client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +def test_purge_queue_from_dict(): + test_purge_queue(request_type=dict) + + +def test_purge_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.purge_queue),
+ '__call__') as call:
+ client.purge_queue()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudtasks.PurgeQueueRequest()
+
+
+ @pytest.mark.asyncio
+ async def test_purge_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.PurgeQueueRequest):
+ client = CloudTasksAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.purge_queue),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue(
+ name='name_value',
+ state=queue.Queue.State.RUNNING,
+ type_=queue.Queue.Type.PULL,
+ ))
+ response = await client.purge_queue(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudtasks.PurgeQueueRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, queue.Queue)
+ assert response.name == 'name_value'
+ assert response.state == queue.Queue.State.RUNNING
+ assert response.type_ == queue.Queue.Type.PULL
+
+
+ @pytest.mark.asyncio
+ async def test_purge_queue_async_from_dict():
+ await test_purge_queue_async(request_type=dict)
+
+
+ def test_purge_queue_field_headers():
+ client = CloudTasksClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloudtasks.PurgeQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + call.return_value = queue.Queue() + client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_purge_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_purge_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.purge_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_purge_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.purge_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name='name_value', + ) + + +def test_pause_queue(transport: str = 'grpc', request_type=cloudtasks.PauseQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + app_engine_http_queue=target.AppEngineHttpQueue(app_engine_routing_override=target.AppEngineRouting(service='service_value')), + ) + response = client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +def test_pause_queue_from_dict(): + test_pause_queue(request_type=dict) + + +def test_pause_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.pause_queue),
+ '__call__') as call:
+ client.pause_queue()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudtasks.PauseQueueRequest()
+
+
+ @pytest.mark.asyncio
+ async def test_pause_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.PauseQueueRequest):
+ client = CloudTasksAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.pause_queue),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue(
+ name='name_value',
+ state=queue.Queue.State.RUNNING,
+ type_=queue.Queue.Type.PULL,
+ ))
+ response = await client.pause_queue(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudtasks.PauseQueueRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, queue.Queue)
+ assert response.name == 'name_value'
+ assert response.state == queue.Queue.State.RUNNING
+ assert response.type_ == queue.Queue.Type.PULL
+
+
+ @pytest.mark.asyncio
+ async def test_pause_queue_async_from_dict():
+ await test_pause_queue_async(request_type=dict)
+
+
+ def test_pause_queue_field_headers():
+ client = CloudTasksClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloudtasks.PauseQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + call.return_value = queue.Queue() + client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_pause_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_pause_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.pause_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_pause_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_queue( + cloudtasks.PauseQueueRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.pause_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.pause_queue( + cloudtasks.PauseQueueRequest(), + name='name_value', + ) + + +def test_resume_queue(transport: str = 'grpc', request_type=cloudtasks.ResumeQueueRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + app_engine_http_queue=target.AppEngineHttpQueue(app_engine_routing_override=target.AppEngineRouting(service='service_value')), + ) + response = client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +def test_resume_queue_from_dict(): + test_resume_queue(request_type=dict) + + +def test_resume_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + client.resume_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + +@pytest.mark.asyncio +async def test_resume_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ResumeQueueRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( + name='name_value', + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + )) + response = await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == 'name_value' + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +@pytest.mark.asyncio +async def test_resume_queue_async_from_dict(): + await test_resume_queue_async(request_type=dict) + + +def test_resume_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloudtasks.ResumeQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + call.return_value = queue.Queue() + client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_resume_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_resume_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.resume_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_resume_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_queue( + cloudtasks.ResumeQueueRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_queue), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.resume_queue( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.resume_queue( + cloudtasks.ResumeQueueRequest(), + name='name_value', + ) + + +def test_get_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_get_iam_policy_from_dict(): + test_get_iam_policy(request_type=dict) + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + +def test_get_iam_policy_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + response = client.get_iam_policy(request={ + 'resource': 'resource_value', + 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_get_iam_policy_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +def test_get_iam_policy_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_set_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_set_iam_policy_from_dict(): + test_set_iam_policy(request_type=dict) + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + +def test_set_iam_policy_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy(request={ + 'resource': 'resource_value', + 'policy': policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +def test_set_iam_policy_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_test_iam_permissions(transport: str = 'grpc', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_from_dict(): + test_test_iam_permissions(request_type=dict) + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + )) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + +def test_test_iam_permissions_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions(request={ + 'resource': 'resource_value', + 'permissions': ['permissions_value'], + } + ) + call.assert_called() + + +def test_test_iam_permissions_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.test_iam_permissions( + resource='resource_value', + permissions=['permissions_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + assert args[0].permissions == ['permissions_value'] + + +def test_test_iam_permissions_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource='resource_value', + permissions=['permissions_value'], + ) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.test_iam_permissions( + resource='resource_value', + permissions=['permissions_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + assert args[0].permissions == ['permissions_value'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource='resource_value', + permissions=['permissions_value'], + ) + + +def test_list_tasks(transport: str = 'grpc', request_type=cloudtasks.ListTasksRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse( + next_page_token='next_page_token_value', + ) + response = client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_tasks_from_dict(): + test_list_tasks(request_type=dict) + + +def test_list_tasks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + client.list_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + +@pytest.mark.asyncio +async def test_list_tasks_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ListTasksRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTasksAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_tasks_async_from_dict(): + await test_list_tasks_async(request_type=dict) + + +def test_list_tasks_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + call.return_value = cloudtasks.ListTasksResponse() + client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_tasks_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse()) + await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_tasks_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tasks( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_tasks_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tasks( + cloudtasks.ListTasksRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+    response = await client.list_tasks(
+        parent='parent_value',
+    )
+
+    # Establish that the underlying call was made with the expected
+    # request object values.
+    assert len(call.mock_calls)
+    _, args, _ = call.mock_calls[0]
+    assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_tasks_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_tasks(
+            cloudtasks.ListTasksRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_tasks_pager():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_tasks),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                    task.Task(),
+                ],
+                next_page_token='abc',
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[],
+                next_page_token='def',
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                ],
+                next_page_token='ghi',
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_tasks(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, task.Task)
+                   for i in results)
+
+def test_list_tasks_pages():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_tasks),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                    task.Task(),
+                ],
+                next_page_token='abc',
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[],
+                next_page_token='def',
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                ],
+                next_page_token='ghi',
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_tasks(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_tasks_async_pager():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_tasks),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                    task.Task(),
+                ],
+                next_page_token='abc',
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[],
+                next_page_token='def',
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                ],
+                next_page_token='ghi',
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_tasks(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, task.Task)
+                   for i in responses)
+
+@pytest.mark.asyncio
+async def test_list_tasks_async_pages():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_tasks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token='abc', + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token='def', + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token='ghi', + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_tasks(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_task(transport: str = 'grpc', request_type=cloudtasks.GetTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name='name_value', + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), + ) + response = client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +def test_get_task_from_dict(): + test_get_task(request_type=dict) + + +def test_get_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + client.get_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + +@pytest.mark.asyncio +async def test_get_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.GetTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( + name='name_value', + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + )) + response = await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_get_task_async_from_dict(): + await test_get_task_async(request_type=dict) + + +def test_get_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + call.return_value = task.Task() + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_task( + cloudtasks.GetTaskRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_task( + cloudtasks.GetTaskRequest(), + name='name_value', + ) + + +def test_create_task(transport: str = 'grpc', request_type=cloudtasks.CreateTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task( + name='name_value', + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), + ) + response = client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_task.Task) + assert response.name == 'name_value' + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_from_dict(): + test_create_task(request_type=dict) + + +def test_create_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + client.create_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + +@pytest.mark.asyncio +async def test_create_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CreateTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task( + name='name_value', + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + )) + response = await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_task.Task) + assert response.name == 'name_value' + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == gct_task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_create_task_async_from_dict(): + await test_create_task_async(request_type=dict) + + +def test_create_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value = gct_task.Task() + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_task( + parent='parent_value', + task=gct_task.Task(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].task == gct_task.Task(name='name_value') + + +def test_create_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_task( + cloudtasks.CreateTaskRequest(), + parent='parent_value', + task=gct_task.Task(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_task), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gct_task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_task( + parent='parent_value', + task=gct_task.Task(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].task == gct_task.Task(name='name_value') + + +@pytest.mark.asyncio +async def test_create_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_task( + cloudtasks.CreateTaskRequest(), + parent='parent_value', + task=gct_task.Task(name='name_value'), + ) + + +def test_delete_task(transport: str = 'grpc', request_type=cloudtasks.DeleteTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_task_from_dict(): + test_delete_task(request_type=dict) + + +def test_delete_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + client.delete_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + +@pytest.mark.asyncio +async def test_delete_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.DeleteTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_task_async_from_dict(): + await test_delete_task_async(request_type=dict) + + +def test_delete_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + call.return_value = None + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_task( + cloudtasks.DeleteTaskRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_task( + cloudtasks.DeleteTaskRequest(), + name='name_value', + ) + + +def test_run_task(transport: str = 'grpc', request_type=cloudtasks.RunTaskRequest): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name='name_value', + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), + ) + response = client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +def test_run_task_from_dict(): + test_run_task(request_type=dict) + + +def test_run_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + client.run_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + +@pytest.mark.asyncio +async def test_run_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.RunTaskRequest): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( + name='name_value', + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + )) + response = await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == 'name_value' + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_run_task_async_from_dict(): + await test_run_task_async(request_type=dict) + + +def test_run_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + call.return_value = task.Task() + client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_run_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_run_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_run_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_task( + cloudtasks.RunTaskRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_run_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.run_task), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.run_task( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_run_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.run_task( + cloudtasks.RunTaskRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudTasksClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudTasksGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudTasksGrpcTransport, + ) + +def test_cloud_tasks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudTasksTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_cloud_tasks_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.CloudTasksTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_queues', + 'get_queue', + 'create_queue', + 'update_queue', + 'delete_queue', + 'purge_queue', + 'pause_queue', + 'resume_queue', + 'get_iam_policy', + 'set_iam_policy', + 'test_iam_permissions', + 'list_tasks', + 'get_task', + 'create_task', + 'delete_task', + 'run_task', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_cloud_tasks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_tasks_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + 
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + quota_project_id="octopus", + ) + + +def test_cloud_tasks_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_cloud_tasks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudTasksClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_tasks_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudTasksClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_cloud_tasks_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_cloud_tasks_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudTasksGrpcTransport, grpc_helpers), + (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "cloudtasks.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="cloudtasks.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) +def test_cloud_tasks_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_cloud_tasks_host_no_port(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudtasks.googleapis.com'), + ) + assert client.transport._host == 'cloudtasks.googleapis.com:443' + + +def test_cloud_tasks_host_with_port(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudtasks.googleapis.com:8000'), + ) + assert client.transport._host == 'cloudtasks.googleapis.com:8000' + +def test_cloud_tasks_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_tasks_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudTasksGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) +def test_cloud_tasks_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + 
credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) +def test_cloud_tasks_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_queue_path(): + project = "squid" + location = "clam" + queue = "whelk" + expected = "projects/{project}/locations/{location}/queues/{queue}".format(project=project, location=location, queue=queue, ) + actual = 
CloudTasksClient.queue_path(project, location, queue) + assert expected == actual + + +def test_parse_queue_path(): + expected = { + "project": "octopus", + "location": "oyster", + "queue": "nudibranch", + } + path = CloudTasksClient.queue_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_queue_path(path) + assert expected == actual + +def test_task_path(): + project = "cuttlefish" + location = "mussel" + queue = "winkle" + task = "nautilus" + expected = "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(project=project, location=location, queue=queue, task=task, ) + actual = CloudTasksClient.task_path(project, location, queue, task) + assert expected == actual + + +def test_parse_task_path(): + expected = { + "project": "scallop", + "location": "abalone", + "queue": "squid", + "task": "clam", + } + path = CloudTasksClient.task_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_task_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = CloudTasksClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = CloudTasksClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudTasksClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = CloudTasksClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = CloudTasksClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CloudTasksClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = CloudTasksClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = CloudTasksClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = CloudTasksClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudTasksClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CloudTasksClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = CloudTasksClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.CloudTasksTransport, '_prep_wrapped_messages') as prep: + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.CloudTasksTransport, '_prep_wrapped_messages') as prep: + transport_class = CloudTasksClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) From e32fd7a836fc202e66c3c3c33619f1342ef00a5e Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 21 Jun 2021 23:59:09 +0000 Subject: [PATCH 2/4] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md --- .coveragerc | 1 - .../services/cloud_tasks/transports/base.py | 40 +- .../services/cloud_tasks/transports/grpc.py | 7 +- .../cloud_tasks/transports/grpc_asyncio.py | 7 +- .../services/cloud_tasks/transports/base.py | 40 +- .../services/cloud_tasks/transports/grpc.py | 7 +- 
.../cloud_tasks/transports/grpc_asyncio.py | 7 +- .../services/cloud_tasks/transports/base.py | 40 +- .../services/cloud_tasks/transports/grpc.py | 7 +- .../cloud_tasks/transports/grpc_asyncio.py | 7 +- owl-bot-staging/v2/.coveragerc | 17 - owl-bot-staging/v2/MANIFEST.in | 2 - owl-bot-staging/v2/README.rst | 49 - owl-bot-staging/v2/docs/conf.py | 376 - owl-bot-staging/v2/docs/index.rst | 7 - .../v2/docs/tasks_v2/cloud_tasks.rst | 10 - owl-bot-staging/v2/docs/tasks_v2/services.rst | 6 - owl-bot-staging/v2/docs/tasks_v2/types.rst | 7 - .../v2/google/cloud/tasks/__init__.py | 77 - .../v2/google/cloud/tasks/py.typed | 2 - .../v2/google/cloud/tasks_v2/__init__.py | 78 - .../google/cloud/tasks_v2/gapic_metadata.json | 183 - .../v2/google/cloud/tasks_v2/py.typed | 2 - .../cloud/tasks_v2/services/__init__.py | 15 - .../tasks_v2/services/cloud_tasks/__init__.py | 22 - .../services/cloud_tasks/async_client.py | 1801 ----- .../tasks_v2/services/cloud_tasks/client.py | 1940 ------ .../tasks_v2/services/cloud_tasks/pagers.py | 264 - .../cloud_tasks/transports/__init__.py | 33 - .../services/cloud_tasks/transports/base.py | 441 -- .../services/cloud_tasks/transports/grpc.py | 779 --- .../cloud_tasks/transports/grpc_asyncio.py | 783 --- .../google/cloud/tasks_v2/types/__init__.py | 80 - .../google/cloud/tasks_v2/types/cloudtasks.py | 558 -- .../v2/google/cloud/tasks_v2/types/queue.py | 434 -- .../v2/google/cloud/tasks_v2/types/target.py | 548 -- .../v2/google/cloud/tasks_v2/types/task.py | 262 - owl-bot-staging/v2/mypy.ini | 3 - owl-bot-staging/v2/noxfile.py | 132 - .../v2/scripts/fixup_tasks_v2_keywords.py | 191 - owl-bot-staging/v2/setup.py | 53 - owl-bot-staging/v2/tests/__init__.py | 16 - owl-bot-staging/v2/tests/unit/__init__.py | 16 - .../v2/tests/unit/gapic/__init__.py | 16 - .../v2/tests/unit/gapic/tasks_v2/__init__.py | 16 - .../unit/gapic/tasks_v2/test_cloud_tasks.py | 5181 -------------- owl-bot-staging/v2beta2/.coveragerc | 17 - owl-bot-staging/v2beta2/MANIFEST.in | 2 
- owl-bot-staging/v2beta2/README.rst | 49 - owl-bot-staging/v2beta2/docs/conf.py | 376 - owl-bot-staging/v2beta2/docs/index.rst | 7 - .../docs/tasks_v2beta2/cloud_tasks.rst | 10 - .../v2beta2/docs/tasks_v2beta2/services.rst | 6 - .../v2beta2/docs/tasks_v2beta2/types.rst | 7 - .../v2beta2/google/cloud/tasks/__init__.py | 89 - .../v2beta2/google/cloud/tasks/py.typed | 2 - .../google/cloud/tasks_v2beta2/__init__.py | 90 - .../cloud/tasks_v2beta2/gapic_metadata.json | 223 - .../google/cloud/tasks_v2beta2/py.typed | 2 - .../cloud/tasks_v2beta2/services/__init__.py | 15 - .../services/cloud_tasks/__init__.py | 22 - .../services/cloud_tasks/async_client.py | 2249 ------ .../services/cloud_tasks/client.py | 2388 ------- .../services/cloud_tasks/pagers.py | 264 - .../cloud_tasks/transports/__init__.py | 33 - .../services/cloud_tasks/transports/base.py | 497 -- .../services/cloud_tasks/transports/grpc.py | 942 --- .../cloud_tasks/transports/grpc_asyncio.py | 946 --- .../cloud/tasks_v2beta2/types/__init__.py | 92 - .../cloud/tasks_v2beta2/types/cloudtasks.py | 869 --- .../google/cloud/tasks_v2beta2/types/queue.py | 530 -- .../cloud/tasks_v2beta2/types/target.py | 487 -- .../google/cloud/tasks_v2beta2/types/task.py | 254 - owl-bot-staging/v2beta2/mypy.ini | 3 - owl-bot-staging/v2beta2/noxfile.py | 132 - .../scripts/fixup_tasks_v2beta2_keywords.py | 195 - owl-bot-staging/v2beta2/setup.py | 53 - owl-bot-staging/v2beta2/tests/__init__.py | 16 - .../v2beta2/tests/unit/__init__.py | 16 - .../v2beta2/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/tasks_v2beta2/__init__.py | 16 - .../gapic/tasks_v2beta2/test_cloud_tasks.py | 6121 ----------------- owl-bot-staging/v2beta3/.coveragerc | 17 - owl-bot-staging/v2beta3/MANIFEST.in | 2 - owl-bot-staging/v2beta3/README.rst | 49 - owl-bot-staging/v2beta3/docs/conf.py | 376 - owl-bot-staging/v2beta3/docs/index.rst | 7 - .../docs/tasks_v2beta3/cloud_tasks.rst | 10 - .../v2beta3/docs/tasks_v2beta3/services.rst | 6 - 
.../v2beta3/docs/tasks_v2beta3/types.rst | 7 - .../v2beta3/google/cloud/tasks/__init__.py | 83 - .../v2beta3/google/cloud/tasks/py.typed | 2 - .../google/cloud/tasks_v2beta3/__init__.py | 84 - .../cloud/tasks_v2beta3/gapic_metadata.json | 183 - .../google/cloud/tasks_v2beta3/py.typed | 2 - .../cloud/tasks_v2beta3/services/__init__.py | 15 - .../services/cloud_tasks/__init__.py | 22 - .../services/cloud_tasks/async_client.py | 1803 ----- .../services/cloud_tasks/client.py | 1942 ------ .../services/cloud_tasks/pagers.py | 264 - .../cloud_tasks/transports/__init__.py | 33 - .../services/cloud_tasks/transports/base.py | 441 -- .../services/cloud_tasks/transports/grpc.py | 780 --- .../cloud_tasks/transports/grpc_asyncio.py | 784 --- .../cloud/tasks_v2beta3/types/__init__.py | 86 - .../cloud/tasks_v2beta3/types/cloudtasks.py | 579 -- .../google/cloud/tasks_v2beta3/types/queue.py | 556 -- .../cloud/tasks_v2beta3/types/target.py | 620 -- .../google/cloud/tasks_v2beta3/types/task.py | 280 - owl-bot-staging/v2beta3/mypy.ini | 3 - owl-bot-staging/v2beta3/noxfile.py | 132 - .../scripts/fixup_tasks_v2beta3_keywords.py | 191 - owl-bot-staging/v2beta3/setup.py | 53 - owl-bot-staging/v2beta3/tests/__init__.py | 16 - .../v2beta3/tests/unit/__init__.py | 16 - .../v2beta3/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/tasks_v2beta3/__init__.py | 16 - .../gapic/tasks_v2beta3/test_cloud_tasks.py | 5211 -------------- tests/unit/gapic/tasks_v2/test_cloud_tasks.py | 100 +- .../gapic/tasks_v2beta2/test_cloud_tasks.py | 142 +- .../gapic/tasks_v2beta3/test_cloud_tasks.py | 102 +- 121 files changed, 135 insertions(+), 46495 deletions(-) delete mode 100644 owl-bot-staging/v2/.coveragerc delete mode 100644 owl-bot-staging/v2/MANIFEST.in delete mode 100644 owl-bot-staging/v2/README.rst delete mode 100644 owl-bot-staging/v2/docs/conf.py delete mode 100644 owl-bot-staging/v2/docs/index.rst delete mode 100644 owl-bot-staging/v2/docs/tasks_v2/cloud_tasks.rst delete mode 100644 
owl-bot-staging/v2/docs/tasks_v2/services.rst delete mode 100644 owl-bot-staging/v2/docs/tasks_v2/types.rst delete mode 100644 owl-bot-staging/v2/google/cloud/tasks/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/async_client.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/client.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/pagers.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/types/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/types/cloudtasks.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/types/queue.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/types/target.py delete mode 100644 owl-bot-staging/v2/google/cloud/tasks_v2/types/task.py delete mode 100644 owl-bot-staging/v2/mypy.ini delete mode 100644 owl-bot-staging/v2/noxfile.py delete mode 100644 owl-bot-staging/v2/scripts/fixup_tasks_v2_keywords.py delete mode 100644 owl-bot-staging/v2/setup.py delete mode 100644 owl-bot-staging/v2/tests/__init__.py delete mode 
100644 owl-bot-staging/v2/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/tasks_v2/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/tasks_v2/test_cloud_tasks.py delete mode 100644 owl-bot-staging/v2beta2/.coveragerc delete mode 100644 owl-bot-staging/v2beta2/MANIFEST.in delete mode 100644 owl-bot-staging/v2beta2/README.rst delete mode 100644 owl-bot-staging/v2beta2/docs/conf.py delete mode 100644 owl-bot-staging/v2beta2/docs/index.rst delete mode 100644 owl-bot-staging/v2beta2/docs/tasks_v2beta2/cloud_tasks.rst delete mode 100644 owl-bot-staging/v2beta2/docs/tasks_v2beta2/services.rst delete mode 100644 owl-bot-staging/v2beta2/docs/tasks_v2beta2/types.rst delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks/__init__.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks/py.typed delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/__init__.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/py.typed delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/__init__.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py delete mode 100644 
owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/__init__.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/cloudtasks.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/queue.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/target.py delete mode 100644 owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/task.py delete mode 100644 owl-bot-staging/v2beta2/mypy.ini delete mode 100644 owl-bot-staging/v2beta2/noxfile.py delete mode 100644 owl-bot-staging/v2beta2/scripts/fixup_tasks_v2beta2_keywords.py delete mode 100644 owl-bot-staging/v2beta2/setup.py delete mode 100644 owl-bot-staging/v2beta2/tests/__init__.py delete mode 100644 owl-bot-staging/v2beta2/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v2beta2/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/__init__.py delete mode 100644 owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py delete mode 100644 owl-bot-staging/v2beta3/.coveragerc delete mode 100644 owl-bot-staging/v2beta3/MANIFEST.in delete mode 100644 owl-bot-staging/v2beta3/README.rst delete mode 100644 owl-bot-staging/v2beta3/docs/conf.py delete mode 100644 owl-bot-staging/v2beta3/docs/index.rst delete mode 100644 owl-bot-staging/v2beta3/docs/tasks_v2beta3/cloud_tasks.rst delete mode 100644 owl-bot-staging/v2beta3/docs/tasks_v2beta3/services.rst delete mode 100644 owl-bot-staging/v2beta3/docs/tasks_v2beta3/types.rst delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks/__init__.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks/py.typed delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/__init__.py delete 
mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/gapic_metadata.json delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/py.typed delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/__init__.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/__init__.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/cloudtasks.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/queue.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/target.py delete mode 100644 owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/task.py delete mode 100644 owl-bot-staging/v2beta3/mypy.ini delete mode 100644 owl-bot-staging/v2beta3/noxfile.py delete mode 100644 owl-bot-staging/v2beta3/scripts/fixup_tasks_v2beta3_keywords.py delete mode 100644 owl-bot-staging/v2beta3/setup.py delete mode 100644 owl-bot-staging/v2beta3/tests/__init__.py delete mode 100644 owl-bot-staging/v2beta3/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v2beta3/tests/unit/gapic/__init__.py delete 
mode 100644 owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/__init__.py delete mode 100644 owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py diff --git a/.coveragerc b/.coveragerc index eb43c0f5..1d5bc53f 100644 --- a/.coveragerc +++ b/.coveragerc @@ -2,7 +2,6 @@ branch = True [report] -fail_under = 100 show_missing = True omit = google/cloud/tasks/__init__.py diff --git a/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py b/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py index 402cb230..3bf359eb 100644 --- a/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py +++ b/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py @@ -24,6 +24,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.tasks_v2.types import cloudtasks from google.cloud.tasks_v2.types import queue @@ -50,8 +51,6 @@ except pkg_resources.DistributionNotFound: # pragma: NO COVER _GOOGLE_AUTH_VERSION = None -_API_CORE_VERSION = google.api_core.__version__ - class CloudTasksTransport(abc.ABC): """Abstract transport class for CloudTasks.""" @@ -69,6 +68,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -92,6 +92,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ":" not in host: @@ -120,13 +122,20 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials is service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials - # TODO(busunkim): These two class methods are in the base transport + # TODO(busunkim): This method is in the base transport # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-api-core - # and google-auth are increased. + # should be deleted once the minimum required versions of google-auth is increased. # TODO: Remove this function once google-auth >= 1.25.0 is required @classmethod @@ -147,27 +156,6 @@ def _get_scopes_kwargs( return scopes_kwargs - # TODO: Remove this function once google-api-core >= 1.26.0 is required - @classmethod - def _get_self_signed_jwt_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Union[Optional[Sequence[str]], str]]: - """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" - - self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} - - if _API_CORE_VERSION and ( - packaging.version.parse(_API_CORE_VERSION) - >= packaging.version.parse("1.26.0") - ): - self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES - self_signed_jwt_kwargs["scopes"] = scopes - self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST - else: - self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES - - return self_signed_jwt_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py b/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py index e7f3ceac..cc7b0524 100644 --- a/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py +++ b/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py @@ -157,6 +157,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: @@ -212,14 +213,14 @@ def create_channel( and ``credentials_file`` are passed. """ - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) diff --git a/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py b/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py index 4773ed21..81429c9d 100644 --- a/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py +++ b/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py @@ -86,14 +86,14 @@ def create_channel( aio.Channel: A gRPC AsyncIO channel object. 
""" - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -203,6 +203,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py index 3af4ea62..1f360f26 100644 --- a/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py +++ b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py @@ -24,6 +24,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.tasks_v2beta2.types import cloudtasks from google.cloud.tasks_v2beta2.types import queue @@ -50,8 +51,6 @@ except pkg_resources.DistributionNotFound: # pragma: NO COVER _GOOGLE_AUTH_VERSION = None -_API_CORE_VERSION = google.api_core.__version__ - class CloudTasksTransport(abc.ABC): """Abstract transport class for CloudTasks.""" @@ -69,6 +68,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -92,6 +92,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -120,13 +122,20 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials is service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials - # TODO(busunkim): These two class methods are in the base transport + # TODO(busunkim): This method is in the base transport # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-api-core - # and google-auth are increased. + # should be deleted once the minimum required versions of google-auth is increased. 
# TODO: Remove this function once google-auth >= 1.25.0 is required @classmethod @@ -147,27 +156,6 @@ def _get_scopes_kwargs( return scopes_kwargs - # TODO: Remove this function once google-api-core >= 1.26.0 is required - @classmethod - def _get_self_signed_jwt_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Union[Optional[Sequence[str]], str]]: - """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" - - self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} - - if _API_CORE_VERSION and ( - packaging.version.parse(_API_CORE_VERSION) - >= packaging.version.parse("1.26.0") - ): - self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES - self_signed_jwt_kwargs["scopes"] = scopes - self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST - else: - self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES - - return self_signed_jwt_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py index 72838fc7..0f7237cd 100644 --- a/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py +++ b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py @@ -157,6 +157,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: @@ -212,14 +213,14 @@ def create_channel( and ``credentials_file`` are passed. 
""" - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) diff --git a/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py index a5c5bea9..e6bc7b40 100644 --- a/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py +++ b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py @@ -86,14 +86,14 @@ def create_channel( aio.Channel: A gRPC AsyncIO channel object. """ - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -203,6 +203,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py index 5c331ac6..11eb9874 100644 --- a/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py +++ b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py @@ -24,6 +24,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.tasks_v2beta3.types import cloudtasks from google.cloud.tasks_v2beta3.types import queue @@ -50,8 
+51,6 @@ except pkg_resources.DistributionNotFound: # pragma: NO COVER _GOOGLE_AUTH_VERSION = None -_API_CORE_VERSION = google.api_core.__version__ - class CloudTasksTransport(abc.ABC): """Abstract transport class for CloudTasks.""" @@ -69,6 +68,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -92,6 +92,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -120,13 +122,20 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials is service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials - # TODO(busunkim): These two class methods are in the base transport + # TODO(busunkim): This method is in the base transport # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-api-core - # and google-auth are increased. + # should be deleted once the minimum required versions of google-auth is increased. 
# TODO: Remove this function once google-auth >= 1.25.0 is required @classmethod @@ -147,27 +156,6 @@ def _get_scopes_kwargs( return scopes_kwargs - # TODO: Remove this function once google-api-core >= 1.26.0 is required - @classmethod - def _get_self_signed_jwt_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Union[Optional[Sequence[str]], str]]: - """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" - - self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} - - if _API_CORE_VERSION and ( - packaging.version.parse(_API_CORE_VERSION) - >= packaging.version.parse("1.26.0") - ): - self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES - self_signed_jwt_kwargs["scopes"] = scopes - self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST - else: - self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES - - return self_signed_jwt_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py index 8437327f..37ae3394 100644 --- a/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py +++ b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py @@ -157,6 +157,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: @@ -212,14 +213,14 @@ def create_channel( and ``credentials_file`` are passed. 
""" - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) diff --git a/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py index cc327a61..d18adf00 100644 --- a/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py +++ b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py @@ -86,14 +86,14 @@ def create_channel( aio.Channel: A gRPC AsyncIO channel object. """ - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -203,6 +203,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc deleted file mode 100644 index 1d5bc53f..00000000 --- a/owl-bot-staging/v2/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/tasks/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in deleted file mode 100644 index 4fdb4f57..00000000 --- a/owl-bot-staging/v2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/tasks *.py -recursive-include google/cloud/tasks_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst deleted file mode 100644 index 6171a7e2..00000000 --- a/owl-bot-staging/v2/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Tasks API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Tasks API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py deleted file mode 100644 index 62c563cc..00000000 --- a/owl-bot-staging/v2/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-tasks documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-tasks" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
-# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-tasks-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-tasks.tex", - u"google-cloud-tasks Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-tasks", - u"Google Cloud Tasks Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-tasks", - u"google-cloud-tasks Documentation", - author, - "google-cloud-tasks", - "GAPIC library for Google Cloud Tasks API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. 
-# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst deleted file mode 100644 index f3d07358..00000000 --- a/owl-bot-staging/v2/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - tasks_v2/services - tasks_v2/types diff --git a/owl-bot-staging/v2/docs/tasks_v2/cloud_tasks.rst b/owl-bot-staging/v2/docs/tasks_v2/cloud_tasks.rst deleted file mode 100644 index 11481d7c..00000000 --- a/owl-bot-staging/v2/docs/tasks_v2/cloud_tasks.rst +++ /dev/null @@ -1,10 +0,0 @@ -CloudTasks ----------------------------- - -.. automodule:: google.cloud.tasks_v2.services.cloud_tasks - :members: - :inherited-members: - -.. 
automodule:: google.cloud.tasks_v2.services.cloud_tasks.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v2/docs/tasks_v2/services.rst b/owl-bot-staging/v2/docs/tasks_v2/services.rst deleted file mode 100644 index f24b73b1..00000000 --- a/owl-bot-staging/v2/docs/tasks_v2/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Tasks v2 API -====================================== -.. toctree:: - :maxdepth: 2 - - cloud_tasks diff --git a/owl-bot-staging/v2/docs/tasks_v2/types.rst b/owl-bot-staging/v2/docs/tasks_v2/types.rst deleted file mode 100644 index 7a2a9a4b..00000000 --- a/owl-bot-staging/v2/docs/tasks_v2/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Tasks v2 API -=================================== - -.. automodule:: google.cloud.tasks_v2.types - :members: - :undoc-members: - :show-inheritance: diff --git a/owl-bot-staging/v2/google/cloud/tasks/__init__.py b/owl-bot-staging/v2/google/cloud/tasks/__init__.py deleted file mode 100644 index 054d56f6..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks/__init__.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.cloud.tasks_v2.services.cloud_tasks.client import CloudTasksClient -from google.cloud.tasks_v2.services.cloud_tasks.async_client import CloudTasksAsyncClient - -from google.cloud.tasks_v2.types.cloudtasks import CreateQueueRequest -from google.cloud.tasks_v2.types.cloudtasks import CreateTaskRequest -from google.cloud.tasks_v2.types.cloudtasks import DeleteQueueRequest -from google.cloud.tasks_v2.types.cloudtasks import DeleteTaskRequest -from google.cloud.tasks_v2.types.cloudtasks import GetQueueRequest -from google.cloud.tasks_v2.types.cloudtasks import GetTaskRequest -from google.cloud.tasks_v2.types.cloudtasks import ListQueuesRequest -from google.cloud.tasks_v2.types.cloudtasks import ListQueuesResponse -from google.cloud.tasks_v2.types.cloudtasks import ListTasksRequest -from google.cloud.tasks_v2.types.cloudtasks import ListTasksResponse -from google.cloud.tasks_v2.types.cloudtasks import PauseQueueRequest -from google.cloud.tasks_v2.types.cloudtasks import PurgeQueueRequest -from google.cloud.tasks_v2.types.cloudtasks import ResumeQueueRequest -from google.cloud.tasks_v2.types.cloudtasks import RunTaskRequest -from google.cloud.tasks_v2.types.cloudtasks import UpdateQueueRequest -from google.cloud.tasks_v2.types.queue import Queue -from google.cloud.tasks_v2.types.queue import RateLimits -from google.cloud.tasks_v2.types.queue import RetryConfig -from google.cloud.tasks_v2.types.queue import StackdriverLoggingConfig -from google.cloud.tasks_v2.types.target import AppEngineHttpRequest -from google.cloud.tasks_v2.types.target import AppEngineRouting -from google.cloud.tasks_v2.types.target import HttpRequest -from google.cloud.tasks_v2.types.target import OAuthToken -from google.cloud.tasks_v2.types.target import OidcToken -from google.cloud.tasks_v2.types.target import HttpMethod -from google.cloud.tasks_v2.types.task import Attempt -from google.cloud.tasks_v2.types.task import Task - -__all__ = ('CloudTasksClient', - 
'CloudTasksAsyncClient', - 'CreateQueueRequest', - 'CreateTaskRequest', - 'DeleteQueueRequest', - 'DeleteTaskRequest', - 'GetQueueRequest', - 'GetTaskRequest', - 'ListQueuesRequest', - 'ListQueuesResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'PauseQueueRequest', - 'PurgeQueueRequest', - 'ResumeQueueRequest', - 'RunTaskRequest', - 'UpdateQueueRequest', - 'Queue', - 'RateLimits', - 'RetryConfig', - 'StackdriverLoggingConfig', - 'AppEngineHttpRequest', - 'AppEngineRouting', - 'HttpRequest', - 'OAuthToken', - 'OidcToken', - 'HttpMethod', - 'Attempt', - 'Task', -) diff --git a/owl-bot-staging/v2/google/cloud/tasks/py.typed b/owl-bot-staging/v2/google/cloud/tasks/py.typed deleted file mode 100644 index 41f0b1b8..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-tasks package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/__init__.py b/owl-bot-staging/v2/google/cloud/tasks_v2/__init__.py deleted file mode 100644 index 43e4f607..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/__init__.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from .services.cloud_tasks import CloudTasksClient -from .services.cloud_tasks import CloudTasksAsyncClient - -from .types.cloudtasks import CreateQueueRequest -from .types.cloudtasks import CreateTaskRequest -from .types.cloudtasks import DeleteQueueRequest -from .types.cloudtasks import DeleteTaskRequest -from .types.cloudtasks import GetQueueRequest -from .types.cloudtasks import GetTaskRequest -from .types.cloudtasks import ListQueuesRequest -from .types.cloudtasks import ListQueuesResponse -from .types.cloudtasks import ListTasksRequest -from .types.cloudtasks import ListTasksResponse -from .types.cloudtasks import PauseQueueRequest -from .types.cloudtasks import PurgeQueueRequest -from .types.cloudtasks import ResumeQueueRequest -from .types.cloudtasks import RunTaskRequest -from .types.cloudtasks import UpdateQueueRequest -from .types.queue import Queue -from .types.queue import RateLimits -from .types.queue import RetryConfig -from .types.queue import StackdriverLoggingConfig -from .types.target import AppEngineHttpRequest -from .types.target import AppEngineRouting -from .types.target import HttpRequest -from .types.target import OAuthToken -from .types.target import OidcToken -from .types.target import HttpMethod -from .types.task import Attempt -from .types.task import Task - -__all__ = ( - 'CloudTasksAsyncClient', -'AppEngineHttpRequest', -'AppEngineRouting', -'Attempt', -'CloudTasksClient', -'CreateQueueRequest', -'CreateTaskRequest', -'DeleteQueueRequest', -'DeleteTaskRequest', -'GetQueueRequest', -'GetTaskRequest', -'HttpMethod', -'HttpRequest', -'ListQueuesRequest', -'ListQueuesResponse', -'ListTasksRequest', -'ListTasksResponse', -'OAuthToken', -'OidcToken', -'PauseQueueRequest', -'PurgeQueueRequest', -'Queue', -'RateLimits', -'ResumeQueueRequest', -'RetryConfig', -'RunTaskRequest', -'StackdriverLoggingConfig', -'Task', -'UpdateQueueRequest', -) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/gapic_metadata.json 
b/owl-bot-staging/v2/google/cloud/tasks_v2/gapic_metadata.json deleted file mode 100644 index 5cacaba9..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/gapic_metadata.json +++ /dev/null @@ -1,183 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.tasks_v2", - "protoPackage": "google.cloud.tasks.v2", - "schema": "1.0", - "services": { - "CloudTasks": { - "clients": { - "grpc": { - "libraryClient": "CloudTasksClient", - "rpcs": { - "CreateQueue": { - "methods": [ - "create_queue" - ] - }, - "CreateTask": { - "methods": [ - "create_task" - ] - }, - "DeleteQueue": { - "methods": [ - "delete_queue" - ] - }, - "DeleteTask": { - "methods": [ - "delete_task" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetQueue": { - "methods": [ - "get_queue" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "ListQueues": { - "methods": [ - "list_queues" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "PauseQueue": { - "methods": [ - "pause_queue" - ] - }, - "PurgeQueue": { - "methods": [ - "purge_queue" - ] - }, - "ResumeQueue": { - "methods": [ - "resume_queue" - ] - }, - "RunTask": { - "methods": [ - "run_task" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateQueue": { - "methods": [ - "update_queue" - ] - } - } - }, - "grpc-async": { - "libraryClient": "CloudTasksAsyncClient", - "rpcs": { - "CreateQueue": { - "methods": [ - "create_queue" - ] - }, - "CreateTask": { - "methods": [ - "create_task" - ] - }, - "DeleteQueue": { - "methods": [ - "delete_queue" - ] - }, - "DeleteTask": { - "methods": [ - "delete_task" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetQueue": { - "methods": [ - "get_queue" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - 
"ListQueues": { - "methods": [ - "list_queues" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "PauseQueue": { - "methods": [ - "pause_queue" - ] - }, - "PurgeQueue": { - "methods": [ - "purge_queue" - ] - }, - "ResumeQueue": { - "methods": [ - "resume_queue" - ] - }, - "RunTask": { - "methods": [ - "run_task" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateQueue": { - "methods": [ - "update_queue" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/py.typed b/owl-bot-staging/v2/google/cloud/tasks_v2/py.typed deleted file mode 100644 index 41f0b1b8..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-tasks package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/__init__.py deleted file mode 100644 index 4de65971..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/__init__.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/__init__.py deleted file mode 100644 index 1478acb5..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import CloudTasksClient -from .async_client import CloudTasksAsyncClient - -__all__ = ( - 'CloudTasksClient', - 'CloudTasksAsyncClient', -) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/async_client.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/async_client.py deleted file mode 100644 index e55241c7..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/async_client.py +++ /dev/null @@ -1,1801 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.tasks_v2.services.cloud_tasks import pagers -from google.cloud.tasks_v2.types import cloudtasks -from google.cloud.tasks_v2.types import queue -from google.cloud.tasks_v2.types import queue as gct_queue -from google.cloud.tasks_v2.types import target -from google.cloud.tasks_v2.types import task -from google.cloud.tasks_v2.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport -from .client import CloudTasksClient - - -class CloudTasksAsyncClient: - """Cloud Tasks allows developers to manage the execution of - background work in their applications. 
- """ - - _client: CloudTasksClient - - DEFAULT_ENDPOINT = CloudTasksClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = CloudTasksClient.DEFAULT_MTLS_ENDPOINT - - queue_path = staticmethod(CloudTasksClient.queue_path) - parse_queue_path = staticmethod(CloudTasksClient.parse_queue_path) - task_path = staticmethod(CloudTasksClient.task_path) - parse_task_path = staticmethod(CloudTasksClient.parse_task_path) - common_billing_account_path = staticmethod(CloudTasksClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(CloudTasksClient.parse_common_billing_account_path) - common_folder_path = staticmethod(CloudTasksClient.common_folder_path) - parse_common_folder_path = staticmethod(CloudTasksClient.parse_common_folder_path) - common_organization_path = staticmethod(CloudTasksClient.common_organization_path) - parse_common_organization_path = staticmethod(CloudTasksClient.parse_common_organization_path) - common_project_path = staticmethod(CloudTasksClient.common_project_path) - parse_common_project_path = staticmethod(CloudTasksClient.parse_common_project_path) - common_location_path = staticmethod(CloudTasksClient.common_location_path) - parse_common_location_path = staticmethod(CloudTasksClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksAsyncClient: The constructed client. - """ - return CloudTasksClient.from_service_account_info.__func__(CloudTasksAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. 
- - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksAsyncClient: The constructed client. - """ - return CloudTasksClient.from_service_account_file.__func__(CloudTasksAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CloudTasksTransport: - """Returns the transport used by the client instance. - - Returns: - CloudTasksTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(CloudTasksClient).get_transport_class, type(CloudTasksClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, CloudTasksTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud tasks client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.CloudTasksTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = CloudTasksClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_queues(self, - request: cloudtasks.ListQueuesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListQueuesAsyncPager: - r"""Lists queues. - Queues are returned in lexicographical order. - - Args: - request (:class:`google.cloud.tasks_v2.types.ListQueuesRequest`): - The request object. Request message for - [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. - parent (:class:`str`): - Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.services.cloud_tasks.pagers.ListQueuesAsyncPager: - Response message for - [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.ListQueuesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_queues, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListQueuesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_queue(self, - request: cloudtasks.GetQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Gets a queue. - - Args: - request (:class:`google.cloud.tasks_v2.types.GetQueueRequest`): - The request object. Request message for - [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. - name (:class:`str`): - Required. The resource name of the queue. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.GetQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_queue, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_queue(self, - request: cloudtasks.CreateQueueRequest = None, - *, - parent: str = None, - queue: gct_queue.Queue = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_queue.Queue: - r"""Creates a queue. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (:class:`google.cloud.tasks_v2.types.CreateQueueRequest`): - The request object. Request message for - [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. - parent (:class:`str`): - Required. The location name in which the queue will be - created. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - The list of allowed locations can be obtained by calling - Cloud Tasks' implementation of - [ListLocations][google.cloud.location.Locations.ListLocations]. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - queue (:class:`google.cloud.tasks_v2.types.Queue`): - Required. The queue to create. - - [Queue's name][google.cloud.tasks.v2.Queue.name] cannot - be the same as an existing queue. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, queue]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.CreateQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if queue is not None: - request.queue = queue - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_queue, - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_queue(self, - request: cloudtasks.UpdateQueueRequest = None, - *, - queue: gct_queue.Queue = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_queue.Queue: - r"""Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (:class:`google.cloud.tasks_v2.types.UpdateQueueRequest`): - The request object. Request message for - [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. - queue (:class:`google.cloud.tasks_v2.types.Queue`): - Required. The queue to create or update. - - The queue's [name][google.cloud.tasks.v2.Queue.name] - must be specified. - - Output only fields cannot be modified using UpdateQueue. - Any value specified for an output only field will be - ignored. The queue's - [name][google.cloud.tasks.v2.Queue.name] cannot be - changed. 
- - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - A mask used to specify which fields - of the queue are being updated. - If empty, then all fields will be - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([queue, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.UpdateQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if queue is not None: - request.queue = queue - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_queue, - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("queue.name", request.queue.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_queue(self, - request: cloudtasks.DeleteQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be - created for 7 days. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (:class:`google.cloud.tasks_v2.types.DeleteQueueRequest`): - The request object. Request message for - [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. - name (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.DeleteQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_queue, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def purge_queue(self, - request: cloudtasks.PurgeQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Purges a queue by deleting all of its tasks. - All tasks created before this method is called are - permanently deleted. - Purge operations can take up to one minute to take - effect. Tasks might be dispatched before the purge takes - effect. A purge is irreversible. - - Args: - request (:class:`google.cloud.tasks_v2.types.PurgeQueueRequest`): - The request object. Request message for - [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. - name (:class:`str`): - Required. The queue name. 
For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.PurgeQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.purge_queue, - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def pause_queue(self, - request: cloudtasks.PauseQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. - Tasks can still be added when the queue is paused. A queue is - paused if its [state][google.cloud.tasks.v2.Queue.state] is - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. - - Args: - request (:class:`google.cloud.tasks_v2.types.PauseQueueRequest`): - The request object. Request message for - [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. - name (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.PauseQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.pause_queue, - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def resume_queue(self, - request: cloudtasks.ResumeQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The - state of a queue is stored in the queue's - [state][google.cloud.tasks.v2.Queue.state]; after calling this - method it will be set to - [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can lead - to target overloading. If you are resuming high-QPS queues, - follow the 500/50/5 pattern described in `Managing Cloud Tasks - Scaling - Risks `__. - - Args: - request (:class:`google.cloud.tasks_v2.types.ResumeQueueRequest`): - The request object. 
Request message for - [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. - name (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.ResumeQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.resume_queue, - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: iam_policy_pb2.GetIamPolicyRequest = None, - *, - resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a - [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if - the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Args: - request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). 
- A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. - - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: iam_policy_pb2.SetIamPolicyRequest = None, - *, - resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy for a - [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM - permissions yet. Project-level permissions are required to use - the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Args: - request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being specified. 
See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). - A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. 
- - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, - *, - resource: str = None, - permissions: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that a caller has on a - [Queue][google.cloud.tasks.v2.Queue]. If the resource does not - exist, this will return an empty set of permissions, not a - [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Args: - request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (:class:`Sequence[str]`): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. 
- - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_tasks(self, - request: cloudtasks.ListTasksRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTasksAsyncPager: - r"""Lists the tasks in a queue. - - By default, only the - [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved - due to performance considerations; - [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] - controls the subset of information which is returned. - - The tasks may be returned in any order. The ordering may change - at any time. - - Args: - request (:class:`google.cloud.tasks_v2.types.ListTasksRequest`): - The request object. Request message for listing tasks - using - [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. - parent (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.services.cloud_tasks.pagers.ListTasksAsyncPager: - Response message for listing tasks using - [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.ListTasksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_tasks, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTasksAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_task(self, - request: cloudtasks.GetTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Gets a task. - - Args: - request (:class:`google.cloud.tasks_v2.types.GetTaskRequest`): - The request object. Request message for getting a task - using - [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. - name (:class:`str`): - Required. The task name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.GetTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_task, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def create_task(self, - request: cloudtasks.CreateTaskRequest = None, - *, - parent: str = None, - task: gct_task.Task = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_task.Task: - r"""Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask - command. - - - The maximum task size is 100KB. - - Args: - request (:class:`google.cloud.tasks_v2.types.CreateTaskRequest`): - The request object. Request message for - [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. - parent (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - The queue must already exist. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task (:class:`google.cloud.tasks_v2.types.Task`): - Required. The task to add. - - Task names have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. - The user can optionally specify a task - [name][google.cloud.tasks.v2.Task.name]. If a name is - not specified then the system will generate a random - unique task id, which will be set in the task returned - in the [response][google.cloud.tasks.v2.Task.name]. - - If - [schedule_time][google.cloud.tasks.v2.Task.schedule_time] - is not set or is in the past then Cloud Tasks will set - it to the current time. - - Task De-duplication: - - Explicitly specifying a task ID enables task - de-duplication. If a task's ID is identical to that of - an existing task or a task that was deleted or executed - recently then the call will fail with - [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the - task's queue was created using Cloud Tasks, then another - task with the same name can't be created for ~1hour - after the original task was deleted or executed. 
If the - task's queue was created using queue.yaml or queue.xml, - then another task with the same name can't be created - for ~9days after the original task was deleted or - executed. - - Because there is an extra lookup cost to identify - duplicate task names, these - [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] - calls have significantly increased latency. Using hashed - strings for the task id or for the prefix of the task id - is recommended. Choosing task ids that are sequential or - have sequential prefixes, for example using a timestamp, - causes an increase in latency and error rates in all - task commands. The infrastructure relies on an - approximately uniform distribution of task ids to store - and serve tasks efficiently. - - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, task]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.CreateTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if task is not None: - request.task = task - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_task, - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_task(self, - request: cloudtasks.DeleteTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a task. - A task can be deleted if it is scheduled or dispatched. - A task cannot be deleted if it has executed successfully - or permanently failed. - - Args: - request (:class:`google.cloud.tasks_v2.types.DeleteTaskRequest`): - The request object. Request message for deleting a task - using - [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. - name (:class:`str`): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.DeleteTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_task, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def run_task(self, - request: cloudtasks.RunTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, - even if the task is already running, the queue has reached its - [RateLimits][google.cloud.tasks.v2.RateLimits] or is - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. For - example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can - be used to retry a failed task after a fix has been made or to - manually force a task to be dispatched now. - - The dispatched task is returned. 
That is, the task that is - returned contains the [status][Task.status] after the task is - dispatched but before the task is received by its target. - - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will - be reset to the time that - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called - plus the retry delay specified in the queue's - [RetryConfig][google.cloud.tasks.v2.RetryConfig]. - - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - - Args: - request (:class:`google.cloud.tasks_v2.types.RunTaskRequest`): - The request object. Request message for forcing a task - to run now using - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. - name (:class:`str`): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.RunTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_task, - default_timeout=10.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-tasks", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "CloudTasksAsyncClient", -) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/client.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/client.py deleted file mode 100644 index 1209d0ef..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/client.py +++ /dev/null @@ -1,1940 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.tasks_v2.services.cloud_tasks import pagers -from google.cloud.tasks_v2.types import cloudtasks -from google.cloud.tasks_v2.types import queue -from google.cloud.tasks_v2.types import queue as gct_queue -from google.cloud.tasks_v2.types import target -from google.cloud.tasks_v2.types import task -from google.cloud.tasks_v2.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import CloudTasksGrpcTransport -from 
.transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport - - -class CloudTasksClientMeta(type): - """Metaclass for the CloudTasks client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] - _transport_registry["grpc"] = CloudTasksGrpcTransport - _transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[CloudTasksTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class CloudTasksClient(metaclass=CloudTasksClientMeta): - """Cloud Tasks allows developers to manage the execution of - background work in their applications. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "cloudtasks.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CloudTasksTransport: - """Returns the transport used by the client instance. - - Returns: - CloudTasksTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def queue_path(project: str,location: str,queue: str,) -> str: - """Returns a fully-qualified queue string.""" - return "projects/{project}/locations/{location}/queues/{queue}".format(project=project, location=location, queue=queue, ) - - @staticmethod - def parse_queue_path(path: str) -> Dict[str,str]: - """Parses a queue path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def task_path(project: str,location: str,queue: str,task: str,) -> str: - """Returns a fully-qualified task string.""" - return "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(project=project, location=location, queue=queue, task=task, ) - - @staticmethod - def parse_task_path(path: str) -> Dict[str,str]: - """Parses a task path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)/tasks/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - 
"""Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudTasksTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud tasks client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, CloudTasksTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
- client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, CloudTasksTransport): - # transport is a CloudTasksTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - ) - - def list_queues(self, - request: cloudtasks.ListQueuesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListQueuesPager: - r"""Lists queues. - Queues are returned in lexicographical order. - - Args: - request (google.cloud.tasks_v2.types.ListQueuesRequest): - The request object. Request message for - [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. - parent (str): - Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.services.cloud_tasks.pagers.ListQueuesPager: - Response message for - [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.ListQueuesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.ListQueuesRequest): - request = cloudtasks.ListQueuesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_queues] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListQueuesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_queue(self, - request: cloudtasks.GetQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Gets a queue. - - Args: - request (google.cloud.tasks_v2.types.GetQueueRequest): - The request object. Request message for - [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. - name (str): - Required. The resource name of the queue. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.GetQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.GetQueueRequest): - request = cloudtasks.GetQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_queue(self, - request: cloudtasks.CreateQueueRequest = None, - *, - parent: str = None, - queue: gct_queue.Queue = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_queue.Queue: - r"""Creates a queue. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (google.cloud.tasks_v2.types.CreateQueueRequest): - The request object. Request message for - [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. - parent (str): - Required. The location name in which the queue will be - created. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - The list of allowed locations can be obtained by calling - Cloud Tasks' implementation of - [ListLocations][google.cloud.location.Locations.ListLocations]. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - queue (google.cloud.tasks_v2.types.Queue): - Required. The queue to create. - - [Queue's name][google.cloud.tasks.v2.Queue.name] cannot - be the same as an existing queue. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, queue]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.CreateQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.CreateQueueRequest): - request = cloudtasks.CreateQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if queue is not None: - request.queue = queue - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def update_queue(self, - request: cloudtasks.UpdateQueueRequest = None, - *, - queue: gct_queue.Queue = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_queue.Queue: - r"""Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (google.cloud.tasks_v2.types.UpdateQueueRequest): - The request object. Request message for - [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. - queue (google.cloud.tasks_v2.types.Queue): - Required. The queue to create or update. - - The queue's [name][google.cloud.tasks.v2.Queue.name] - must be specified. - - Output only fields cannot be modified using UpdateQueue. - Any value specified for an output only field will be - ignored. The queue's - [name][google.cloud.tasks.v2.Queue.name] cannot be - changed. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask used to specify which fields - of the queue are being updated. - If empty, then all fields will be - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([queue, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.UpdateQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.UpdateQueueRequest): - request = cloudtasks.UpdateQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if queue is not None: - request.queue = queue - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("queue.name", request.queue.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_queue(self, - request: cloudtasks.DeleteQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be - created for 7 days. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (google.cloud.tasks_v2.types.DeleteQueueRequest): - The request object. Request message for - [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.DeleteQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, cloudtasks.DeleteQueueRequest): - request = cloudtasks.DeleteQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def purge_queue(self, - request: cloudtasks.PurgeQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Purges a queue by deleting all of its tasks. - All tasks created before this method is called are - permanently deleted. - Purge operations can take up to one minute to take - effect. Tasks might be dispatched before the purge takes - effect. A purge is irreversible. - - Args: - request (google.cloud.tasks_v2.types.PurgeQueueRequest): - The request object. Request message for - [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Queue: - A queue is a container of related - tasks. 
Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.PurgeQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.PurgeQueueRequest): - request = cloudtasks.PurgeQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.purge_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def pause_queue(self, - request: cloudtasks.PauseQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. 
- Tasks can still be added when the queue is paused. A queue is - paused if its [state][google.cloud.tasks.v2.Queue.state] is - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. - - Args: - request (google.cloud.tasks_v2.types.PauseQueueRequest): - The request object. Request message for - [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.PauseQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.PauseQueueRequest): - request = cloudtasks.PauseQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.pause_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def resume_queue(self, - request: cloudtasks.ResumeQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The - state of a queue is stored in the queue's - [state][google.cloud.tasks.v2.Queue.state]; after calling this - method it will be set to - [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can lead - to target overloading. If you are resuming high-QPS queues, - follow the 500/50/5 pattern described in `Managing Cloud Tasks - Scaling - Risks `__. - - Args: - request (google.cloud.tasks_v2.types.ResumeQueueRequest): - The request object. Request message for - [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.ResumeQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.ResumeQueueRequest): - request = cloudtasks.ResumeQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.resume_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_iam_policy(self, - request: iam_policy_pb2.GetIamPolicyRequest = None, - *, - resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a - [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if - the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Args: - request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): - The request object. Request message for `GetIamPolicy` - method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). - A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. 
- - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. 
- request = iam_policy_pb2.GetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: iam_policy_pb2.SetIamPolicyRequest = None, - *, - resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy for a - [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM - permissions yet. Project-level permissions are required to use - the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Args: - request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): - The request object. Request message for `SetIamPolicy` - method. - resource (str): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). - A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. - - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, - *, - resource: str = None, - permissions: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that a caller has on a - [Queue][google.cloud.tasks.v2.Queue]. If the resource does not - exist, this will return an empty set of permissions, not a - [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. 
- - Args: - request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): - The request object. Request message for - `TestIamPermissions` method. - resource (str): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (Sequence[str]): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. 
- request = iam_policy_pb2.TestIamPermissionsRequest() - if resource is not None: - request.resource = resource - if permissions: - request.permissions.extend(permissions) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_tasks(self, - request: cloudtasks.ListTasksRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTasksPager: - r"""Lists the tasks in a queue. - - By default, only the - [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved - due to performance considerations; - [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] - controls the subset of information which is returned. - - The tasks may be returned in any order. The ordering may change - at any time. - - Args: - request (google.cloud.tasks_v2.types.ListTasksRequest): - The request object. Request message for listing tasks - using - [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.services.cloud_tasks.pagers.ListTasksPager: - Response message for listing tasks using - [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.ListTasksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.ListTasksRequest): - request = cloudtasks.ListTasksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_tasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTasksPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_task(self, - request: cloudtasks.GetTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Gets a task. - - Args: - request (google.cloud.tasks_v2.types.GetTaskRequest): - The request object. Request message for getting a task - using - [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.GetTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.GetTaskRequest): - request = cloudtasks.GetTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.get_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_task(self, - request: cloudtasks.CreateTaskRequest = None, - *, - parent: str = None, - task: gct_task.Task = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_task.Task: - r"""Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask - command. - - - The maximum task size is 100KB. - - Args: - request (google.cloud.tasks_v2.types.CreateTaskRequest): - The request object. Request message for - [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - The queue must already exist. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task (google.cloud.tasks_v2.types.Task): - Required. The task to add. - - Task names have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. - The user can optionally specify a task - [name][google.cloud.tasks.v2.Task.name]. If a name is - not specified then the system will generate a random - unique task id, which will be set in the task returned - in the [response][google.cloud.tasks.v2.Task.name]. - - If - [schedule_time][google.cloud.tasks.v2.Task.schedule_time] - is not set or is in the past then Cloud Tasks will set - it to the current time. 
- - Task De-duplication: - - Explicitly specifying a task ID enables task - de-duplication. If a task's ID is identical to that of - an existing task or a task that was deleted or executed - recently then the call will fail with - [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the - task's queue was created using Cloud Tasks, then another - task with the same name can't be created for ~1hour - after the original task was deleted or executed. If the - task's queue was created using queue.yaml or queue.xml, - then another task with the same name can't be created - for ~9days after the original task was deleted or - executed. - - Because there is an extra lookup cost to identify - duplicate task names, these - [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] - calls have significantly increased latency. Using hashed - strings for the task id or for the prefix of the task id - is recommended. Choosing task ids that are sequential or - have sequential prefixes, for example using a timestamp, - causes an increase in latency and error rates in all - task commands. The infrastructure relies on an - approximately uniform distribution of task ids to store - and serve tasks efficiently. - - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, task]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.CreateTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.CreateTaskRequest): - request = cloudtasks.CreateTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if task is not None: - request.task = task - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_task(self, - request: cloudtasks.DeleteTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a task. - A task can be deleted if it is scheduled or dispatched. - A task cannot be deleted if it has executed successfully - or permanently failed. - - Args: - request (google.cloud.tasks_v2.types.DeleteTaskRequest): - The request object. Request message for deleting a task - using - [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. - name (str): - Required. The task name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.DeleteTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.DeleteTaskRequest): - request = cloudtasks.DeleteTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def run_task(self, - request: cloudtasks.RunTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, - even if the task is already running, the queue has reached its - [RateLimits][google.cloud.tasks.v2.RateLimits] or is - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. For - example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can - be used to retry a failed task after a fix has been made or to - manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is - returned contains the [status][Task.status] after the task is - dispatched but before the task is received by its target. - - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will - be reset to the time that - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called - plus the retry delay specified in the queue's - [RetryConfig][google.cloud.tasks.v2.RetryConfig]. - - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - - Args: - request (google.cloud.tasks_v2.types.RunTaskRequest): - The request object. Request message for forcing a task - to run now using - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. - name (str): - Required. The task name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.RunTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.RunTaskRequest): - request = cloudtasks.RunTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-tasks", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "CloudTasksClient", -) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/pagers.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/pagers.py deleted file mode 100644 index b8bff650..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/pagers.py +++ /dev/null @@ -1,264 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.cloud.tasks_v2.types import cloudtasks -from google.cloud.tasks_v2.types import queue -from google.cloud.tasks_v2.types import task - - -class ListQueuesPager: - """A pager for iterating through ``list_queues`` requests. - - This class thinly wraps an initial - :class:`google.cloud.tasks_v2.types.ListQueuesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``queues`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListQueues`` requests and continue to iterate - through the ``queues`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.tasks_v2.types.ListQueuesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., cloudtasks.ListQueuesResponse], - request: cloudtasks.ListQueuesRequest, - response: cloudtasks.ListQueuesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.tasks_v2.types.ListQueuesRequest): - The initial request object. - response (google.cloud.tasks_v2.types.ListQueuesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudtasks.ListQueuesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[cloudtasks.ListQueuesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[queue.Queue]: - for page in self.pages: - yield from page.queues - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListQueuesAsyncPager: - """A pager for iterating through ``list_queues`` requests. - - This class thinly wraps an initial - :class:`google.cloud.tasks_v2.types.ListQueuesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``queues`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListQueues`` requests and continue to iterate - through the ``queues`` field on the - corresponding responses. - - All the usual :class:`google.cloud.tasks_v2.types.ListQueuesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cloudtasks.ListQueuesResponse]], - request: cloudtasks.ListQueuesRequest, - response: cloudtasks.ListQueuesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.tasks_v2.types.ListQueuesRequest): - The initial request object. - response (google.cloud.tasks_v2.types.ListQueuesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudtasks.ListQueuesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[cloudtasks.ListQueuesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[queue.Queue]: - async def async_generator(): - async for page in self.pages: - for response in page.queues: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksPager: - """A pager for iterating through ``list_tasks`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.tasks_v2.types.ListTasksResponse` object, and - provides an ``__iter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.tasks_v2.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., cloudtasks.ListTasksResponse], - request: cloudtasks.ListTasksRequest, - response: cloudtasks.ListTasksResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.tasks_v2.types.ListTasksRequest): - The initial request object. - response (google.cloud.tasks_v2.types.ListTasksResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = cloudtasks.ListTasksRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[cloudtasks.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[task.Task]: - for page in self.pages: - yield from page.tasks - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksAsyncPager: - """A pager for iterating through ``list_tasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.tasks_v2.types.ListTasksResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.tasks_v2.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cloudtasks.ListTasksResponse]], - request: cloudtasks.ListTasksRequest, - response: cloudtasks.ListTasksResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.tasks_v2.types.ListTasksRequest): - The initial request object. - response (google.cloud.tasks_v2.types.ListTasksResponse): - The initial response object. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudtasks.ListTasksRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[cloudtasks.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[task.Task]: - async def async_generator(): - async for page in self.pages: - for response in page.tasks: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py deleted file mode 100644 index 3db96829..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import CloudTasksTransport -from .grpc import CloudTasksGrpcTransport -from .grpc_asyncio import CloudTasksGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] -_transport_registry['grpc'] = CloudTasksGrpcTransport -_transport_registry['grpc_asyncio'] = CloudTasksGrpcAsyncIOTransport - -__all__ = ( - 'CloudTasksTransport', - 'CloudTasksGrpcTransport', - 'CloudTasksGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py deleted file mode 100644 index 24108ca8..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py +++ /dev/null @@ -1,441 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.tasks_v2.types import cloudtasks -from google.cloud.tasks_v2.types import queue -from google.cloud.tasks_v2.types import queue as gct_queue -from google.cloud.tasks_v2.types import task -from google.cloud.tasks_v2.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-tasks', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class CloudTasksTransport(abc.ABC): - """Abstract transport class for CloudTasks.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'cloudtasks.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: 
Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials is service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.list_queues: gapic_v1.method.wrap_method( - self.list_queues, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=client_info, - ), - self.get_queue: gapic_v1.method.wrap_method( - self.get_queue, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=client_info, - ), - self.create_queue: gapic_v1.method.wrap_method( - self.create_queue, - default_timeout=10.0, - client_info=client_info, - ), - self.update_queue: gapic_v1.method.wrap_method( - self.update_queue, - default_timeout=10.0, - client_info=client_info, - ), - self.delete_queue: gapic_v1.method.wrap_method( - self.delete_queue, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=client_info, - ), - self.purge_queue: gapic_v1.method.wrap_method( - self.purge_queue, - default_timeout=10.0, - client_info=client_info, - ), - self.pause_queue: gapic_v1.method.wrap_method( - self.pause_queue, - default_timeout=10.0, - client_info=client_info, - ), - self.resume_queue: gapic_v1.method.wrap_method( - self.resume_queue, - default_timeout=10.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - 
client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=10.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=client_info, - ), - self.list_tasks: gapic_v1.method.wrap_method( - self.list_tasks, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=client_info, - ), - self.get_task: gapic_v1.method.wrap_method( - self.get_task, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=client_info, - ), - self.create_task: gapic_v1.method.wrap_method( - self.create_task, - default_timeout=10.0, - client_info=client_info, - ), - self.delete_task: gapic_v1.method.wrap_method( - self.delete_task, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=10.0, - ), - default_timeout=10.0, - client_info=client_info, - ), - self.run_task: gapic_v1.method.wrap_method( - self.run_task, - default_timeout=10.0, - client_info=client_info, - ), - } - - @property - def list_queues(self) -> Callable[ - [cloudtasks.ListQueuesRequest], - Union[ - cloudtasks.ListQueuesResponse, - Awaitable[cloudtasks.ListQueuesResponse] - ]]: - raise NotImplementedError() - - @property - def 
get_queue(self) -> Callable[ - [cloudtasks.GetQueueRequest], - Union[ - queue.Queue, - Awaitable[queue.Queue] - ]]: - raise NotImplementedError() - - @property - def create_queue(self) -> Callable[ - [cloudtasks.CreateQueueRequest], - Union[ - gct_queue.Queue, - Awaitable[gct_queue.Queue] - ]]: - raise NotImplementedError() - - @property - def update_queue(self) -> Callable[ - [cloudtasks.UpdateQueueRequest], - Union[ - gct_queue.Queue, - Awaitable[gct_queue.Queue] - ]]: - raise NotImplementedError() - - @property - def delete_queue(self) -> Callable[ - [cloudtasks.DeleteQueueRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def purge_queue(self) -> Callable[ - [cloudtasks.PurgeQueueRequest], - Union[ - queue.Queue, - Awaitable[queue.Queue] - ]]: - raise NotImplementedError() - - @property - def pause_queue(self) -> Callable[ - [cloudtasks.PauseQueueRequest], - Union[ - queue.Queue, - Awaitable[queue.Queue] - ]]: - raise NotImplementedError() - - @property - def resume_queue(self) -> Callable[ - [cloudtasks.ResumeQueueRequest], - Union[ - queue.Queue, - Awaitable[queue.Queue] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_tasks(self) -> Callable[ - [cloudtasks.ListTasksRequest], - Union[ - cloudtasks.ListTasksResponse, - Awaitable[cloudtasks.ListTasksResponse] - 
]]: - raise NotImplementedError() - - @property - def get_task(self) -> Callable[ - [cloudtasks.GetTaskRequest], - Union[ - task.Task, - Awaitable[task.Task] - ]]: - raise NotImplementedError() - - @property - def create_task(self) -> Callable[ - [cloudtasks.CreateTaskRequest], - Union[ - gct_task.Task, - Awaitable[gct_task.Task] - ]]: - raise NotImplementedError() - - @property - def delete_task(self) -> Callable[ - [cloudtasks.DeleteTaskRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def run_task(self) -> Callable[ - [cloudtasks.RunTaskRequest], - Union[ - task.Task, - Awaitable[task.Task] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'CloudTasksTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py deleted file mode 100644 index bc22a2f2..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py +++ /dev/null @@ -1,779 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.tasks_v2.types import cloudtasks -from google.cloud.tasks_v2.types import queue -from google.cloud.tasks_v2.types import queue as gct_queue -from google.cloud.tasks_v2.types import task -from google.cloud.tasks_v2.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO - - -class CloudTasksGrpcTransport(CloudTasksTransport): - """gRPC backend transport for CloudTasks. - - Cloud Tasks allows developers to manage the execution of - background work in their applications. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'cloudtasks.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. 
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'cloudtasks.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def list_queues(self) -> Callable[ - [cloudtasks.ListQueuesRequest], - cloudtasks.ListQueuesResponse]: - r"""Return a callable for the list queues method over gRPC. - - Lists queues. - Queues are returned in lexicographical order. - - Returns: - Callable[[~.ListQueuesRequest], - ~.ListQueuesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_queues' not in self._stubs: - self._stubs['list_queues'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/ListQueues', - request_serializer=cloudtasks.ListQueuesRequest.serialize, - response_deserializer=cloudtasks.ListQueuesResponse.deserialize, - ) - return self._stubs['list_queues'] - - @property - def get_queue(self) -> Callable[ - [cloudtasks.GetQueueRequest], - queue.Queue]: - r"""Return a callable for the get queue method over gRPC. - - Gets a queue. - - Returns: - Callable[[~.GetQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_queue' not in self._stubs: - self._stubs['get_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/GetQueue', - request_serializer=cloudtasks.GetQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['get_queue'] - - @property - def create_queue(self) -> Callable[ - [cloudtasks.CreateQueueRequest], - gct_queue.Queue]: - r"""Return a callable for the create queue method over gRPC. - - Creates a queue. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.CreateQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_queue' not in self._stubs: - self._stubs['create_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/CreateQueue', - request_serializer=cloudtasks.CreateQueueRequest.serialize, - response_deserializer=gct_queue.Queue.deserialize, - ) - return self._stubs['create_queue'] - - @property - def update_queue(self) -> Callable[ - [cloudtasks.UpdateQueueRequest], - gct_queue.Queue]: - r"""Return a callable for the update queue method over gRPC. - - Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.UpdateQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_queue' not in self._stubs: - self._stubs['update_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/UpdateQueue', - request_serializer=cloudtasks.UpdateQueueRequest.serialize, - response_deserializer=gct_queue.Queue.deserialize, - ) - return self._stubs['update_queue'] - - @property - def delete_queue(self) -> Callable[ - [cloudtasks.DeleteQueueRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete queue method over gRPC. - - Deletes a queue. 
- - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be - created for 7 days. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.DeleteQueueRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_queue' not in self._stubs: - self._stubs['delete_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/DeleteQueue', - request_serializer=cloudtasks.DeleteQueueRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_queue'] - - @property - def purge_queue(self) -> Callable[ - [cloudtasks.PurgeQueueRequest], - queue.Queue]: - r"""Return a callable for the purge queue method over gRPC. - - Purges a queue by deleting all of its tasks. - All tasks created before this method is called are - permanently deleted. - Purge operations can take up to one minute to take - effect. Tasks might be dispatched before the purge takes - effect. A purge is irreversible. - - Returns: - Callable[[~.PurgeQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'purge_queue' not in self._stubs: - self._stubs['purge_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/PurgeQueue', - request_serializer=cloudtasks.PurgeQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['purge_queue'] - - @property - def pause_queue(self) -> Callable[ - [cloudtasks.PauseQueueRequest], - queue.Queue]: - r"""Return a callable for the pause queue method over gRPC. - - Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. - Tasks can still be added when the queue is paused. A queue is - paused if its [state][google.cloud.tasks.v2.Queue.state] is - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. - - Returns: - Callable[[~.PauseQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'pause_queue' not in self._stubs: - self._stubs['pause_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/PauseQueue', - request_serializer=cloudtasks.PauseQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['pause_queue'] - - @property - def resume_queue(self) -> Callable[ - [cloudtasks.ResumeQueueRequest], - queue.Queue]: - r"""Return a callable for the resume queue method over gRPC. - - Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. 
The - state of a queue is stored in the queue's - [state][google.cloud.tasks.v2.Queue.state]; after calling this - method it will be set to - [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can lead - to target overloading. If you are resuming high-QPS queues, - follow the 500/50/5 pattern described in `Managing Cloud Tasks - Scaling - Risks `__. - - Returns: - Callable[[~.ResumeQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'resume_queue' not in self._stubs: - self._stubs['resume_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/ResumeQueue', - request_serializer=cloudtasks.ResumeQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['resume_queue'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a - [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if - the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy for a - [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM - permissions yet. Project-level permissions are required to use - the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that a caller has on a - [Queue][google.cloud.tasks.v2.Queue]. 
If the resource does not - exist, this will return an empty set of permissions, not a - [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def list_tasks(self) -> Callable[ - [cloudtasks.ListTasksRequest], - cloudtasks.ListTasksResponse]: - r"""Return a callable for the list tasks method over gRPC. - - Lists the tasks in a queue. - - By default, only the - [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved - due to performance considerations; - [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] - controls the subset of information which is returned. - - The tasks may be returned in any order. The ordering may change - at any time. - - Returns: - Callable[[~.ListTasksRequest], - ~.ListTasksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/ListTasks', - request_serializer=cloudtasks.ListTasksRequest.serialize, - response_deserializer=cloudtasks.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - @property - def get_task(self) -> Callable[ - [cloudtasks.GetTaskRequest], - task.Task]: - r"""Return a callable for the get task method over gRPC. - - Gets a task. - - Returns: - Callable[[~.GetTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_task' not in self._stubs: - self._stubs['get_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/GetTask', - request_serializer=cloudtasks.GetTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def create_task(self) -> Callable[ - [cloudtasks.CreateTaskRequest], - gct_task.Task]: - r"""Return a callable for the create task method over gRPC. - - Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask - command. - - - The maximum task size is 100KB. - - Returns: - Callable[[~.CreateTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_task' not in self._stubs: - self._stubs['create_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/CreateTask', - request_serializer=cloudtasks.CreateTaskRequest.serialize, - response_deserializer=gct_task.Task.deserialize, - ) - return self._stubs['create_task'] - - @property - def delete_task(self) -> Callable[ - [cloudtasks.DeleteTaskRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete task method over gRPC. - - Deletes a task. - A task can be deleted if it is scheduled or dispatched. - A task cannot be deleted if it has executed successfully - or permanently failed. - - Returns: - Callable[[~.DeleteTaskRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_task' not in self._stubs: - self._stubs['delete_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/DeleteTask', - request_serializer=cloudtasks.DeleteTaskRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_task'] - - @property - def run_task(self) -> Callable[ - [cloudtasks.RunTaskRequest], - task.Task]: - r"""Return a callable for the run task method over gRPC. - - Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, - even if the task is already running, the queue has reached its - [RateLimits][google.cloud.tasks.v2.RateLimits] or is - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. For - example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can - be used to retry a failed task after a fix has been made or to - manually force a task to be dispatched now. - - The dispatched task is returned. 
That is, the task that is - returned contains the [status][Task.status] after the task is - dispatched but before the task is received by its target. - - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will - be reset to the time that - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called - plus the retry delay specified in the queue's - [RetryConfig][google.cloud.tasks.v2.RetryConfig]. - - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - - Returns: - Callable[[~.RunTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_task' not in self._stubs: - self._stubs['run_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/RunTask', - request_serializer=cloudtasks.RunTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['run_task'] - - -__all__ = ( - 'CloudTasksGrpcTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py deleted file mode 100644 index c7ee527a..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py +++ /dev/null @@ -1,783 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.tasks_v2.types import cloudtasks -from google.cloud.tasks_v2.types import queue -from google.cloud.tasks_v2.types import queue as gct_queue -from google.cloud.tasks_v2.types import task -from google.cloud.tasks_v2.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO -from .grpc import CloudTasksGrpcTransport - - -class CloudTasksGrpcAsyncIOTransport(CloudTasksTransport): - """gRPC AsyncIO backend transport for CloudTasks. - - Cloud Tasks allows developers to manage the execution of - background work in their applications. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'cloudtasks.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'cloudtasks.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. 
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def list_queues(self) -> Callable[ - [cloudtasks.ListQueuesRequest], - Awaitable[cloudtasks.ListQueuesResponse]]: - r"""Return a callable for the list queues method over gRPC. - - Lists queues. - Queues are returned in lexicographical order. - - Returns: - Callable[[~.ListQueuesRequest], - Awaitable[~.ListQueuesResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_queues' not in self._stubs: - self._stubs['list_queues'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/ListQueues', - request_serializer=cloudtasks.ListQueuesRequest.serialize, - response_deserializer=cloudtasks.ListQueuesResponse.deserialize, - ) - return self._stubs['list_queues'] - - @property - def get_queue(self) -> Callable[ - [cloudtasks.GetQueueRequest], - Awaitable[queue.Queue]]: - r"""Return a callable for the get queue method over gRPC. - - Gets a queue. - - Returns: - Callable[[~.GetQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_queue' not in self._stubs: - self._stubs['get_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/GetQueue', - request_serializer=cloudtasks.GetQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['get_queue'] - - @property - def create_queue(self) -> Callable[ - [cloudtasks.CreateQueueRequest], - Awaitable[gct_queue.Queue]]: - r"""Return a callable for the create queue method over gRPC. - - Creates a queue. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. 
- - Returns: - Callable[[~.CreateQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_queue' not in self._stubs: - self._stubs['create_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/CreateQueue', - request_serializer=cloudtasks.CreateQueueRequest.serialize, - response_deserializer=gct_queue.Queue.deserialize, - ) - return self._stubs['create_queue'] - - @property - def update_queue(self) -> Callable[ - [cloudtasks.UpdateQueueRequest], - Awaitable[gct_queue.Queue]]: - r"""Return a callable for the update queue method over gRPC. - - Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.UpdateQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_queue' not in self._stubs: - self._stubs['update_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/UpdateQueue', - request_serializer=cloudtasks.UpdateQueueRequest.serialize, - response_deserializer=gct_queue.Queue.deserialize, - ) - return self._stubs['update_queue'] - - @property - def delete_queue(self) -> Callable[ - [cloudtasks.DeleteQueueRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete queue method over gRPC. - - Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be - created for 7 days. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.DeleteQueueRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_queue' not in self._stubs: - self._stubs['delete_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/DeleteQueue', - request_serializer=cloudtasks.DeleteQueueRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_queue'] - - @property - def purge_queue(self) -> Callable[ - [cloudtasks.PurgeQueueRequest], - Awaitable[queue.Queue]]: - r"""Return a callable for the purge queue method over gRPC. - - Purges a queue by deleting all of its tasks. - All tasks created before this method is called are - permanently deleted. - Purge operations can take up to one minute to take - effect. Tasks might be dispatched before the purge takes - effect. 
A purge is irreversible. - - Returns: - Callable[[~.PurgeQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'purge_queue' not in self._stubs: - self._stubs['purge_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/PurgeQueue', - request_serializer=cloudtasks.PurgeQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['purge_queue'] - - @property - def pause_queue(self) -> Callable[ - [cloudtasks.PauseQueueRequest], - Awaitable[queue.Queue]]: - r"""Return a callable for the pause queue method over gRPC. - - Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. - Tasks can still be added when the queue is paused. A queue is - paused if its [state][google.cloud.tasks.v2.Queue.state] is - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. - - Returns: - Callable[[~.PauseQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'pause_queue' not in self._stubs: - self._stubs['pause_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/PauseQueue', - request_serializer=cloudtasks.PauseQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['pause_queue'] - - @property - def resume_queue(self) -> Callable[ - [cloudtasks.ResumeQueueRequest], - Awaitable[queue.Queue]]: - r"""Return a callable for the resume queue method over gRPC. - - Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The - state of a queue is stored in the queue's - [state][google.cloud.tasks.v2.Queue.state]; after calling this - method it will be set to - [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can lead - to target overloading. If you are resuming high-QPS queues, - follow the 500/50/5 pattern described in `Managing Cloud Tasks - Scaling - Risks `__. - - Returns: - Callable[[~.ResumeQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'resume_queue' not in self._stubs: - self._stubs['resume_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/ResumeQueue', - request_serializer=cloudtasks.ResumeQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['resume_queue'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. 
- - Gets the access control policy for a - [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if - the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy for a - [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM - permissions yet. Project-level permissions are required to use - the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that a caller has on a - [Queue][google.cloud.tasks.v2.Queue]. If the resource does not - exist, this will return an empty set of permissions, not a - [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def list_tasks(self) -> Callable[ - [cloudtasks.ListTasksRequest], - Awaitable[cloudtasks.ListTasksResponse]]: - r"""Return a callable for the list tasks method over gRPC. - - Lists the tasks in a queue. 
- - By default, only the - [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved - due to performance considerations; - [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] - controls the subset of information which is returned. - - The tasks may be returned in any order. The ordering may change - at any time. - - Returns: - Callable[[~.ListTasksRequest], - Awaitable[~.ListTasksResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/ListTasks', - request_serializer=cloudtasks.ListTasksRequest.serialize, - response_deserializer=cloudtasks.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - @property - def get_task(self) -> Callable[ - [cloudtasks.GetTaskRequest], - Awaitable[task.Task]]: - r"""Return a callable for the get task method over gRPC. - - Gets a task. - - Returns: - Callable[[~.GetTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_task' not in self._stubs: - self._stubs['get_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/GetTask', - request_serializer=cloudtasks.GetTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def create_task(self) -> Callable[ - [cloudtasks.CreateTaskRequest], - Awaitable[gct_task.Task]]: - r"""Return a callable for the create task method over gRPC. 
- - Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask - command. - - - The maximum task size is 100KB. - - Returns: - Callable[[~.CreateTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_task' not in self._stubs: - self._stubs['create_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/CreateTask', - request_serializer=cloudtasks.CreateTaskRequest.serialize, - response_deserializer=gct_task.Task.deserialize, - ) - return self._stubs['create_task'] - - @property - def delete_task(self) -> Callable[ - [cloudtasks.DeleteTaskRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete task method over gRPC. - - Deletes a task. - A task can be deleted if it is scheduled or dispatched. - A task cannot be deleted if it has executed successfully - or permanently failed. - - Returns: - Callable[[~.DeleteTaskRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_task' not in self._stubs: - self._stubs['delete_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/DeleteTask', - request_serializer=cloudtasks.DeleteTaskRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_task'] - - @property - def run_task(self) -> Callable[ - [cloudtasks.RunTaskRequest], - Awaitable[task.Task]]: - r"""Return a callable for the run task method over gRPC. - - Forces a task to run now. 
- - When this method is called, Cloud Tasks will dispatch the task, - even if the task is already running, the queue has reached its - [RateLimits][google.cloud.tasks.v2.RateLimits] or is - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. For - example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can - be used to retry a failed task after a fix has been made or to - manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is - returned contains the [status][Task.status] after the task is - dispatched but before the task is received by its target. - - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will - be reset to the time that - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called - plus the retry delay specified in the queue's - [RetryConfig][google.cloud.tasks.v2.RetryConfig]. - - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - - Returns: - Callable[[~.RunTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'run_task' not in self._stubs: - self._stubs['run_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2.CloudTasks/RunTask', - request_serializer=cloudtasks.RunTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['run_task'] - - -__all__ = ( - 'CloudTasksGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/tasks_v2/types/__init__.py deleted file mode 100644 index 5c3ae8fb..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/types/__init__.py +++ /dev/null @@ -1,80 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .cloudtasks import ( - CreateQueueRequest, - CreateTaskRequest, - DeleteQueueRequest, - DeleteTaskRequest, - GetQueueRequest, - GetTaskRequest, - ListQueuesRequest, - ListQueuesResponse, - ListTasksRequest, - ListTasksResponse, - PauseQueueRequest, - PurgeQueueRequest, - ResumeQueueRequest, - RunTaskRequest, - UpdateQueueRequest, -) -from .queue import ( - Queue, - RateLimits, - RetryConfig, - StackdriverLoggingConfig, -) -from .target import ( - AppEngineHttpRequest, - AppEngineRouting, - HttpRequest, - OAuthToken, - OidcToken, - HttpMethod, -) -from .task import ( - Attempt, - Task, -) - -__all__ = ( - 'CreateQueueRequest', - 'CreateTaskRequest', - 'DeleteQueueRequest', - 'DeleteTaskRequest', - 'GetQueueRequest', - 'GetTaskRequest', - 'ListQueuesRequest', - 'ListQueuesResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'PauseQueueRequest', - 'PurgeQueueRequest', - 'ResumeQueueRequest', - 'RunTaskRequest', - 'UpdateQueueRequest', - 'Queue', - 'RateLimits', - 'RetryConfig', - 'StackdriverLoggingConfig', - 'AppEngineHttpRequest', - 'AppEngineRouting', - 'HttpRequest', - 'OAuthToken', - 'OidcToken', - 'HttpMethod', - 'Attempt', - 'Task', -) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/types/cloudtasks.py b/owl-bot-staging/v2/google/cloud/tasks_v2/types/cloudtasks.py deleted file mode 100644 index 953edc65..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/types/cloudtasks.py +++ /dev/null @@ -1,558 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.tasks_v2.types import queue as gct_queue -from google.cloud.tasks_v2.types import task as gct_task -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.tasks.v2', - manifest={ - 'ListQueuesRequest', - 'ListQueuesResponse', - 'GetQueueRequest', - 'CreateQueueRequest', - 'UpdateQueueRequest', - 'DeleteQueueRequest', - 'PurgeQueueRequest', - 'PauseQueueRequest', - 'ResumeQueueRequest', - 'ListTasksRequest', - 'ListTasksResponse', - 'GetTaskRequest', - 'CreateTaskRequest', - 'DeleteTaskRequest', - 'RunTaskRequest', - }, -) - - -class ListQueuesRequest(proto.Message): - r"""Request message for - [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. - - Attributes: - parent (str): - Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - filter (str): - ``filter`` can be used to specify a subset of queues. Any - [Queue][google.cloud.tasks.v2.Queue] field can be used as a - filter and several operators as supported. For example: - ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as - described in `Stackdriver's Advanced Logs - Filters `__. - - Sample filter "state: PAUSED". - - Note that using filters might cause fewer queues than the - requested page_size to be returned. - page_size (int): - Requested page size. - - The maximum page size is 9800. If unspecified, the page size - will be the maximum. Fewer queues than requested might be - returned, even if more queues exist; use the - [next_page_token][google.cloud.tasks.v2.ListQueuesResponse.next_page_token] - in the response to determine if more queues exist. - page_token (str): - A token identifying the page of results to return. - - To request the first page results, page_token must be empty. 
- To request the next page of results, page_token must be the - value of - [next_page_token][google.cloud.tasks.v2.ListQueuesResponse.next_page_token] - returned from the previous call to - [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] - method. It is an error to switch the value of the - [filter][google.cloud.tasks.v2.ListQueuesRequest.filter] - while iterating through pages. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListQueuesResponse(proto.Message): - r"""Response message for - [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. - - Attributes: - queues (Sequence[google.cloud.tasks_v2.types.Queue]): - The list of queues. - next_page_token (str): - A token to retrieve next page of results. - - To return the next page of results, call - [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] - with this value as the - [page_token][google.cloud.tasks.v2.ListQueuesRequest.page_token]. - - If the next_page_token is empty, there are no more results. - - The page token is valid for only 2 hours. - """ - - @property - def raw_page(self): - return self - - queues = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gct_queue.Queue, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetQueueRequest(proto.Message): - r"""Request message for - [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. - - Attributes: - name (str): - Required. The resource name of the queue. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateQueueRequest(proto.Message): - r"""Request message for - [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. - - Attributes: - parent (str): - Required. 
The location name in which the queue will be - created. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - The list of allowed locations can be obtained by calling - Cloud Tasks' implementation of - [ListLocations][google.cloud.location.Locations.ListLocations]. - queue (google.cloud.tasks_v2.types.Queue): - Required. The queue to create. - - [Queue's name][google.cloud.tasks.v2.Queue.name] cannot be - the same as an existing queue. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - queue = proto.Field( - proto.MESSAGE, - number=2, - message=gct_queue.Queue, - ) - - -class UpdateQueueRequest(proto.Message): - r"""Request message for - [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. - - Attributes: - queue (google.cloud.tasks_v2.types.Queue): - Required. The queue to create or update. - - The queue's [name][google.cloud.tasks.v2.Queue.name] must be - specified. - - Output only fields cannot be modified using UpdateQueue. Any - value specified for an output only field will be ignored. - The queue's [name][google.cloud.tasks.v2.Queue.name] cannot - be changed. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask used to specify which fields of the - queue are being updated. - If empty, then all fields will be updated. - """ - - queue = proto.Field( - proto.MESSAGE, - number=1, - message=gct_queue.Queue, - ) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteQueueRequest(proto.Message): - r"""Request message for - [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. - - Attributes: - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class PurgeQueueRequest(proto.Message): - r"""Request message for - [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. - - Attributes: - name (str): - Required. 
The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class PauseQueueRequest(proto.Message): - r"""Request message for - [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. - - Attributes: - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ResumeQueueRequest(proto.Message): - r"""Request message for - [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. - - Attributes: - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListTasksRequest(proto.Message): - r"""Request message for listing tasks using - [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. - - Attributes: - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - response_view (google.cloud.tasks_v2.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2.Task] will be returned. - - By default response_view is - [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2.Task] resource. - page_size (int): - Maximum page size. 
- - Fewer tasks than requested might be returned, even if more - tasks exist; use - [next_page_token][google.cloud.tasks.v2.ListTasksResponse.next_page_token] - in the response to determine if more tasks exist. - - The maximum page size is 1000. If unspecified, the page size - will be the maximum. - page_token (str): - A token identifying the page of results to return. - - To request the first page results, page_token must be empty. - To request the next page of results, page_token must be the - value of - [next_page_token][google.cloud.tasks.v2.ListTasksResponse.next_page_token] - returned from the previous call to - [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] - method. - - The page token is valid for only 2 hours. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - response_view = proto.Field( - proto.ENUM, - number=2, - enum=gct_task.Task.View, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListTasksResponse(proto.Message): - r"""Response message for listing tasks using - [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. - - Attributes: - tasks (Sequence[google.cloud.tasks_v2.types.Task]): - The list of tasks. - next_page_token (str): - A token to retrieve next page of results. - - To return the next page of results, call - [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] with - this value as the - [page_token][google.cloud.tasks.v2.ListTasksRequest.page_token]. - - If the next_page_token is empty, there are no more results. - """ - - @property - def raw_page(self): - return self - - tasks = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gct_task.Task, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetTaskRequest(proto.Message): - r"""Request message for getting a task using - [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. - - Attributes: - name (str): - Required. The task name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (google.cloud.tasks_v2.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2.Task] will be returned. - - By default response_view is - [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2.Task] resource. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - response_view = proto.Field( - proto.ENUM, - number=2, - enum=gct_task.Task.View, - ) - - -class CreateTaskRequest(proto.Message): - r"""Request message for - [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. - - Attributes: - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - The queue must already exist. - task (google.cloud.tasks_v2.types.Task): - Required. The task to add. - - Task names have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. - The user can optionally specify a task - [name][google.cloud.tasks.v2.Task.name]. If a name is not - specified then the system will generate a random unique task - id, which will be set in the task returned in the - [response][google.cloud.tasks.v2.Task.name]. - - If [schedule_time][google.cloud.tasks.v2.Task.schedule_time] - is not set or is in the past then Cloud Tasks will set it to - the current time. - - Task De-duplication: - - Explicitly specifying a task ID enables task de-duplication. 
- If a task's ID is identical to that of an existing task or a - task that was deleted or executed recently then the call - will fail with - [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the - task's queue was created using Cloud Tasks, then another - task with the same name can't be created for ~1hour after - the original task was deleted or executed. If the task's - queue was created using queue.yaml or queue.xml, then - another task with the same name can't be created for ~9days - after the original task was deleted or executed. - - Because there is an extra lookup cost to identify duplicate - task names, these - [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] - calls have significantly increased latency. Using hashed - strings for the task id or for the prefix of the task id is - recommended. Choosing task ids that are sequential or have - sequential prefixes, for example using a timestamp, causes - an increase in latency and error rates in all task commands. - The infrastructure relies on an approximately uniform - distribution of task ids to store and serve tasks - efficiently. - response_view (google.cloud.tasks_v2.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2.Task] will be returned. - - By default response_view is - [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2.Task] resource. 
- """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - task = proto.Field( - proto.MESSAGE, - number=2, - message=gct_task.Task, - ) - response_view = proto.Field( - proto.ENUM, - number=3, - enum=gct_task.Task.View, - ) - - -class DeleteTaskRequest(proto.Message): - r"""Request message for deleting a task using - [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. - - Attributes: - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class RunTaskRequest(proto.Message): - r"""Request message for forcing a task to run now using - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. - - Attributes: - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (google.cloud.tasks_v2.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2.Task] will be returned. - - By default response_view is - [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2.Task] resource. 
- """ - - name = proto.Field( - proto.STRING, - number=1, - ) - response_view = proto.Field( - proto.ENUM, - number=2, - enum=gct_task.Task.View, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/types/queue.py b/owl-bot-staging/v2/google/cloud/tasks_v2/types/queue.py deleted file mode 100644 index 6bba9e15..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/types/queue.py +++ /dev/null @@ -1,434 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.tasks_v2.types import target -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.tasks.v2', - manifest={ - 'Queue', - 'RateLimits', - 'RetryConfig', - 'StackdriverLoggingConfig', - }, -) - - -class Queue(proto.Message): - r"""A queue is a container of related tasks. Queues are - configured to manage how those tasks are dispatched. - Configurable properties include rate limits, retry options, - queue types, and others. - - Attributes: - name (str): - Caller-specified and required in - [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue], - after which it becomes output only. - - The queue name. 
- - The queue name must have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), colons (:), or periods (.). For - more information, see `Identifying - projects `__ - - ``LOCATION_ID`` is the canonical ID for the queue's - location. The list of available locations can be obtained - by calling - [ListLocations][google.cloud.location.Locations.ListLocations]. - For more information, see - https://cloud.google.com/about/locations/. - - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), or hyphens (-). The maximum length is 100 - characters. - app_engine_routing_override (google.cloud.tasks_v2.types.AppEngineRouting): - Overrides for [task-level - app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. - These settings apply only to [App Engine - tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in this - queue. [Http tasks][google.cloud.tasks.v2.HttpRequest] are - not affected. - - If set, ``app_engine_routing_override`` is used for all [App - Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in - the queue, no matter what the setting is for the [task-level - app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. - rate_limits (google.cloud.tasks_v2.types.RateLimits): - Rate limits for task dispatches. - - [rate_limits][google.cloud.tasks.v2.Queue.rate_limits] and - [retry_config][google.cloud.tasks.v2.Queue.retry_config] are - related because they both control task attempts. However - they control task attempts in different ways: - - - [rate_limits][google.cloud.tasks.v2.Queue.rate_limits] - controls the total rate of dispatches from a queue (i.e. - all traffic dispatched from the queue, regardless of - whether the dispatch is from a first attempt or a retry). 
- - [retry_config][google.cloud.tasks.v2.Queue.retry_config] - controls what happens to particular a task after its - first attempt fails. That is, - [retry_config][google.cloud.tasks.v2.Queue.retry_config] - controls task retries (the second attempt, third attempt, - etc). - - The queue's actual dispatch rate is the result of: - - - Number of tasks in the queue - - User-specified throttling: - [rate_limits][google.cloud.tasks.v2.Queue.rate_limits], - [retry_config][google.cloud.tasks.v2.Queue.retry_config], - and the [queue's - state][google.cloud.tasks.v2.Queue.state]. - - System throttling due to ``429`` (Too Many Requests) or - ``503`` (Service Unavailable) responses from the worker, - high error rates, or to smooth sudden large traffic - spikes. - retry_config (google.cloud.tasks_v2.types.RetryConfig): - Settings that determine the retry behavior. - - - For tasks created using Cloud Tasks: the queue-level - retry settings apply to all tasks in the queue that were - created using Cloud Tasks. Retry settings cannot be set - on individual tasks. - - For tasks created using the App Engine SDK: the - queue-level retry settings apply to all tasks in the - queue which do not have retry settings explicitly set on - the task and were created by the App Engine SDK. See `App - Engine - documentation `__. - state (google.cloud.tasks_v2.types.Queue.State): - Output only. The state of the queue. - - ``state`` can only be changed by called - [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue], - [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue], - or uploading - `queue.yaml/xml `__. - [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] - cannot be used to change ``state``. - purge_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last time this queue was purged. - - All tasks that were - [created][google.cloud.tasks.v2.Task.create_time] before - this time were purged. 
- - A queue can be purged using - [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue], - the `App Engine Task Queue SDK, or the Cloud - Console `__. - - Purge time will be truncated to the nearest microsecond. - Purge time will be unset if the queue has never been purged. - stackdriver_logging_config (google.cloud.tasks_v2.types.StackdriverLoggingConfig): - Configuration options for writing logs to `Stackdriver - Logging `__. If this - field is unset, then no logs are written. - """ - class State(proto.Enum): - r"""State of the queue.""" - STATE_UNSPECIFIED = 0 - RUNNING = 1 - PAUSED = 2 - DISABLED = 3 - - name = proto.Field( - proto.STRING, - number=1, - ) - app_engine_routing_override = proto.Field( - proto.MESSAGE, - number=2, - message=target.AppEngineRouting, - ) - rate_limits = proto.Field( - proto.MESSAGE, - number=3, - message='RateLimits', - ) - retry_config = proto.Field( - proto.MESSAGE, - number=4, - message='RetryConfig', - ) - state = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - purge_time = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - stackdriver_logging_config = proto.Field( - proto.MESSAGE, - number=9, - message='StackdriverLoggingConfig', - ) - - -class RateLimits(proto.Message): - r"""Rate limits. - - This message determines the maximum rate that tasks can be - dispatched by a queue, regardless of whether the dispatch is a first - task attempt or a retry. - - Note: The debugging command, - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask], will run a task - even if the queue has reached its - [RateLimits][google.cloud.tasks.v2.RateLimits]. - - Attributes: - max_dispatches_per_second (float): - The maximum rate at which tasks are dispatched from this - queue. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - - The maximum allowed value is 500. - - This field has the same meaning as `rate in - queue.yaml/xml `__. - max_burst_size (int): - Output only. 
The max burst size. - - Max burst size limits how fast tasks in queue are processed - when many tasks are in the queue and the rate is high. This - field allows the queue to have a high rate so processing - starts shortly after a task is enqueued, but still limits - resource usage when many tasks are enqueued in a short - period of time. - - The `token - bucket `__ - algorithm is used to control the rate of task dispatches. - Each queue has a token bucket that holds tokens, up to the - maximum specified by ``max_burst_size``. Each time a task is - dispatched, a token is removed from the bucket. Tasks will - be dispatched until the queue's bucket runs out of tokens. - The bucket will be continuously refilled with new tokens - based on - [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second]. - - Cloud Tasks will pick the value of ``max_burst_size`` based - on the value of - [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second]. - - For queues that were created or updated using - ``queue.yaml/xml``, ``max_burst_size`` is equal to - `bucket_size `__. - Since ``max_burst_size`` is output only, if - [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] - is called on a queue created by ``queue.yaml/xml``, - ``max_burst_size`` will be reset based on the value of - [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second], - regardless of whether - [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second] - is updated. - max_concurrent_dispatches (int): - The maximum number of concurrent tasks that Cloud Tasks - allows to be dispatched for this queue. After this threshold - has been reached, Cloud Tasks stops dispatching tasks until - the number of concurrent requests decreases. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - The maximum allowed value is 5,000. 
- - This field has the same meaning as `max_concurrent_requests - in - queue.yaml/xml `__. - """ - - max_dispatches_per_second = proto.Field( - proto.DOUBLE, - number=1, - ) - max_burst_size = proto.Field( - proto.INT32, - number=2, - ) - max_concurrent_dispatches = proto.Field( - proto.INT32, - number=3, - ) - - -class RetryConfig(proto.Message): - r"""Retry config. - These settings determine when a failed task attempt is retried. - - Attributes: - max_attempts (int): - Number of attempts per task. - - Cloud Tasks will attempt the task ``max_attempts`` times - (that is, if the first attempt fails, then there will be - ``max_attempts - 1`` retries). Must be >= -1. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - -1 indicates unlimited attempts. - - This field has the same meaning as `task_retry_limit in - queue.yaml/xml `__. - max_retry_duration (google.protobuf.duration_pb2.Duration): - If positive, ``max_retry_duration`` specifies the time limit - for retrying a failed task, measured from when the task was - first attempted. Once ``max_retry_duration`` time has passed - *and* the task has been attempted - [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] - times, no further attempts will be made and the task will be - deleted. - - If zero, then the task age is unlimited. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - ``max_retry_duration`` will be truncated to the nearest - second. - - This field has the same meaning as `task_age_limit in - queue.yaml/xml `__. - min_backoff (google.protobuf.duration_pb2.Duration): - A task will be - [scheduled][google.cloud.tasks.v2.Task.schedule_time] for - retry between - [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] - and - [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] - duration after it fails, if the queue's - [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies - that the task should be retried. 
- - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - ``min_backoff`` will be truncated to the nearest second. - - This field has the same meaning as `min_backoff_seconds in - queue.yaml/xml `__. - max_backoff (google.protobuf.duration_pb2.Duration): - A task will be - [scheduled][google.cloud.tasks.v2.Task.schedule_time] for - retry between - [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] - and - [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] - duration after it fails, if the queue's - [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies - that the task should be retried. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - ``max_backoff`` will be truncated to the nearest second. - - This field has the same meaning as `max_backoff_seconds in - queue.yaml/xml `__. - max_doublings (int): - The time between retries will double ``max_doublings`` - times. - - A task's retry interval starts at - [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff], - then doubles ``max_doublings`` times, then increases - linearly, and finally retries retries at intervals of - [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] - up to - [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] - times. - - For example, if - [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] - is 10s, - [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] - is 300s, and ``max_doublings`` is 3, then the a task will - first be retried in 10s. The retry interval will double - three times, and then increase linearly by 2^3 \* 10s. - Finally, the task will retry at intervals of - [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] - until the task has been attempted - [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] - times. Thus, the requests will retry at 10s, 20s, 40s, 80s, - 160s, 240s, 300s, 300s, .... 
- - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - This field has the same meaning as `max_doublings in - queue.yaml/xml `__. - """ - - max_attempts = proto.Field( - proto.INT32, - number=1, - ) - max_retry_duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - min_backoff = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - max_backoff = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - max_doublings = proto.Field( - proto.INT32, - number=5, - ) - - -class StackdriverLoggingConfig(proto.Message): - r"""Configuration options for writing logs to `Stackdriver - Logging `__. - - Attributes: - sampling_ratio (float): - Specifies the fraction of operations to write to - `Stackdriver - Logging `__. This - field may contain any value between 0.0 and 1.0, inclusive. - 0.0 is the default and means that no operations are logged. - """ - - sampling_ratio = proto.Field( - proto.DOUBLE, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/types/target.py b/owl-bot-staging/v2/google/cloud/tasks_v2/types/target.py deleted file mode 100644 index c2573691..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/types/target.py +++ /dev/null @@ -1,548 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.tasks.v2', - manifest={ - 'HttpMethod', - 'HttpRequest', - 'AppEngineHttpRequest', - 'AppEngineRouting', - 'OAuthToken', - 'OidcToken', - }, -) - - -class HttpMethod(proto.Enum): - r"""The HTTP method used to deliver the task.""" - HTTP_METHOD_UNSPECIFIED = 0 - POST = 1 - GET = 2 - HEAD = 3 - PUT = 4 - DELETE = 5 - PATCH = 6 - OPTIONS = 7 - - -class HttpRequest(proto.Message): - r"""HTTP request. - - The task will be pushed to the worker as an HTTP request. If the - worker or the redirected worker acknowledges the task by returning a - successful HTTP response code ([``200`` - ``299``]), the task will - be removed from the queue. If any other HTTP response code is - returned or no response is received, the task will be retried - according to the following: - - - User-specified throttling: [retry - configuration][google.cloud.tasks.v2.Queue.retry_config], [rate - limits][google.cloud.tasks.v2.Queue.rate_limits], and the - [queue's state][google.cloud.tasks.v2.Queue.state]. - - - System throttling: To prevent the worker from overloading, Cloud - Tasks may temporarily reduce the queue's effective rate. - User-specified settings will not be changed. - - System throttling happens because: - - - Cloud Tasks backs off on all errors. Normally the backoff - specified in [rate - limits][google.cloud.tasks.v2.Queue.rate_limits] will be used. - But if the worker returns ``429`` (Too Many Requests), ``503`` - (Service Unavailable), or the rate of errors is high, Cloud Tasks - will use a higher backoff rate. The retry specified in the - ``Retry-After`` HTTP response header is considered. 
- - - To prevent traffic spikes and to smooth sudden increases in - traffic, dispatches ramp up slowly when the queue is newly - created or idle and if large numbers of tasks suddenly become - available to dispatch (due to spikes in create task rates, the - queue being unpaused, or many tasks that are scheduled at the - same time). - - Attributes: - url (str): - Required. The full url path that the request will be sent - to. - - This string must begin with either "http://" or "https://". - Some examples are: ``http://acme.com`` and - ``https://acme.com/sales:8080``. Cloud Tasks will encode - some characters for safety and compatibility. The maximum - allowed URL length is 2083 characters after encoding. - - The ``Location`` header response from a redirect response - [``300`` - ``399``] may be followed. The redirect is not - counted as a separate attempt. - http_method (google.cloud.tasks_v2.types.HttpMethod): - The HTTP method to use for the request. The - default is POST. - headers (Sequence[google.cloud.tasks_v2.types.HttpRequest.HeadersEntry]): - HTTP request headers. - - This map contains the header field names and values. Headers - can be set when the [task is - created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - - These headers represent a subset of the headers that will - accompany the task's HTTP request. Some HTTP request headers - will be ignored or replaced. - - A partial list of headers that will be ignored or replaced - is: - - - Host: This will be computed by Cloud Tasks and derived - from - [HttpRequest.url][google.cloud.tasks.v2.HttpRequest.url]. - - Content-Length: This will be computed by Cloud Tasks. - - User-Agent: This will be set to ``"Google-Cloud-Tasks"``. - - X-Google-*: Google use only. - - X-AppEngine-*: Google use only. - - ``Content-Type`` won't be set by Cloud Tasks. You can - explicitly set ``Content-Type`` to a media type when the - [task is - created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. 
- For example, ``Content-Type`` can be set to - ``"application/octet-stream"`` or ``"application/json"``. - - Headers which can have multiple values (according to - RFC2616) can be specified using comma-separated values. - - The size of the headers must be less than 80KB. - body (bytes): - HTTP request body. - - A request body is allowed only if the [HTTP - method][google.cloud.tasks.v2.HttpRequest.http_method] is - POST, PUT, or PATCH. It is an error to set body on a task - with an incompatible - [HttpMethod][google.cloud.tasks.v2.HttpMethod]. - oauth_token (google.cloud.tasks_v2.types.OAuthToken): - If specified, an `OAuth - token `__ - will be generated and attached as an ``Authorization`` - header in the HTTP request. - - This type of authorization should generally only be used - when calling Google APIs hosted on \*.googleapis.com. - oidc_token (google.cloud.tasks_v2.types.OidcToken): - If specified, an - `OIDC `__ - token will be generated and attached as an ``Authorization`` - header in the HTTP request. - - This type of authorization can be used for many scenarios, - including calling Cloud Run, or endpoints where you intend - to validate the token yourself. - """ - - url = proto.Field( - proto.STRING, - number=1, - ) - http_method = proto.Field( - proto.ENUM, - number=2, - enum='HttpMethod', - ) - headers = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - body = proto.Field( - proto.BYTES, - number=4, - ) - oauth_token = proto.Field( - proto.MESSAGE, - number=5, - oneof='authorization_header', - message='OAuthToken', - ) - oidc_token = proto.Field( - proto.MESSAGE, - number=6, - oneof='authorization_header', - message='OidcToken', - ) - - -class AppEngineHttpRequest(proto.Message): - r"""App Engine HTTP request. - - The message defines the HTTP request that is sent to an App Engine - app when the task is dispatched. 
- - Using - [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest] - requires - ```appengine.applications.get`` `__ - Google IAM permission for the project and the following scope: - - ``https://www.googleapis.com/auth/cloud-platform`` - - The task will be delivered to the App Engine app which belongs to - the same project as the queue. For more information, see `How - Requests are - Routed `__ - and how routing is affected by `dispatch - files `__. - Traffic is encrypted during transport and never leaves Google - datacenters. Because this traffic is carried over a communication - mechanism internal to Google, you cannot explicitly set the protocol - (for example, HTTP or HTTPS). The request to the handler, however, - will appear to have used the HTTP protocol. - - The [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting] used - to construct the URL that the task is delivered to can be set at the - queue-level or task-level: - - - If [app_engine_routing_override is set on the - queue][Queue.app_engine_routing_override], this value is used for - all tasks in the queue, no matter what the setting is for the - [task-level - app_engine_routing][AppEngineHttpRequest.app_engine_routing]. - - The ``url`` that the task will be sent to is: - - - ``url =`` [host][google.cloud.tasks.v2.AppEngineRouting.host] - ``+`` - [relative_uri][google.cloud.tasks.v2.AppEngineHttpRequest.relative_uri] - - Tasks can be dispatched to secure app handlers, unsecure app - handlers, and URIs restricted with - ```login: admin`` `__. - Because tasks are not run as any user, they cannot be dispatched to - URIs restricted with - ```login: required`` `__ - Task dispatches also do not follow redirects. - - The task attempt has succeeded if the app's request handler returns - an HTTP response code in the range [``200`` - ``299``]. 
The task - attempt has failed if the app's handler returns a non-2xx response - code or Cloud Tasks does not receive response before the - [deadline][google.cloud.tasks.v2.Task.dispatch_deadline]. Failed - tasks will be retried according to the [retry - configuration][google.cloud.tasks.v2.Queue.retry_config]. ``503`` - (Service Unavailable) is considered an App Engine system error - instead of an application error and will cause Cloud Tasks' traffic - congestion control to temporarily throttle the queue's dispatches. - Unlike other types of task targets, a ``429`` (Too Many Requests) - response from an app handler does not cause traffic congestion - control to throttle the queue. - - Attributes: - http_method (google.cloud.tasks_v2.types.HttpMethod): - The HTTP method to use for the request. The default is POST. - - The app's request handler for the task's target URL must be - able to handle HTTP requests with this http_method, - otherwise the task attempt will fail with error code 405 - (Method Not Allowed). See `Writing a push task request - handler `__ - and the documentation for the request handlers in the - language your app is written in e.g. `Python Request - Handler `__. - app_engine_routing (google.cloud.tasks_v2.types.AppEngineRouting): - Task-level setting for App Engine routing. - - - If [app_engine_routing_override is set on the - queue][Queue.app_engine_routing_override], this value is - used for all tasks in the queue, no matter what the - setting is for the [task-level - app_engine_routing][AppEngineHttpRequest.app_engine_routing]. - relative_uri (str): - The relative URI. - The relative URI must begin with "/" and must be - a valid HTTP relative URI. It can contain a path - and query string arguments. If the relative URI - is empty, then the root path "/" will be used. - No spaces are allowed, and the maximum length - allowed is 2083 characters. 
- headers (Sequence[google.cloud.tasks_v2.types.AppEngineHttpRequest.HeadersEntry]): - HTTP request headers. - - This map contains the header field names and values. Headers - can be set when the [task is - created][google.cloud.tasks.v2.CloudTasks.CreateTask]. - Repeated headers are not supported but a header value can - contain commas. - - Cloud Tasks sets some headers to default values: - - - ``User-Agent``: By default, this header is - ``"AppEngine-Google; (+http://code.google.com/appengine)"``. - This header can be modified, but Cloud Tasks will append - ``"AppEngine-Google; (+http://code.google.com/appengine)"`` - to the modified ``User-Agent``. - - If the task has a - [body][google.cloud.tasks.v2.AppEngineHttpRequest.body], - Cloud Tasks sets the following headers: - - - ``Content-Type``: By default, the ``Content-Type`` header - is set to ``"application/octet-stream"``. The default can - be overridden by explicitly setting ``Content-Type`` to a - particular media type when the [task is - created][google.cloud.tasks.v2.CloudTasks.CreateTask]. - For example, ``Content-Type`` can be set to - ``"application/json"``. - - ``Content-Length``: This is computed by Cloud Tasks. This - value is output only. It cannot be changed. - - The headers below cannot be set or overridden: - - - ``Host`` - - ``X-Google-*`` - - ``X-AppEngine-*`` - - In addition, Cloud Tasks sets some headers when the task is - dispatched, such as headers containing information about the - task; see `request - headers `__. - These headers are set only when the task is dispatched, so - they are not visible when the task is returned in a Cloud - Tasks response. - - Although there is no specific limit for the maximum number - of headers or the size, there is a limit on the maximum size - of the [Task][google.cloud.tasks.v2.Task]. For more - information, see the - [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] - documentation. - body (bytes): - HTTP request body. 
- - A request body is allowed only if the HTTP method is POST or - PUT. It is an error to set a body on a task with an - incompatible [HttpMethod][google.cloud.tasks.v2.HttpMethod]. - """ - - http_method = proto.Field( - proto.ENUM, - number=1, - enum='HttpMethod', - ) - app_engine_routing = proto.Field( - proto.MESSAGE, - number=2, - message='AppEngineRouting', - ) - relative_uri = proto.Field( - proto.STRING, - number=3, - ) - headers = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - body = proto.Field( - proto.BYTES, - number=5, - ) - - -class AppEngineRouting(proto.Message): - r"""App Engine Routing. - - Defines routing characteristics specific to App Engine - service, - version, and instance. - - For more information about services, versions, and instances see `An - Overview of App - Engine `__, - `Microservices Architecture on Google App - Engine `__, - `App Engine Standard request - routing `__, - and `App Engine Flex request - routing `__. - - Using [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting] - requires - ```appengine.applications.get`` `__ - Google IAM permission for the project and the following scope: - - ``https://www.googleapis.com/auth/cloud-platform`` - - Attributes: - service (str): - App service. - - By default, the task is sent to the service which is the - default service when the task is attempted. - - For some queues or tasks which were created using the App - Engine Task Queue API, - [host][google.cloud.tasks.v2.AppEngineRouting.host] is not - parsable into - [service][google.cloud.tasks.v2.AppEngineRouting.service], - [version][google.cloud.tasks.v2.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. - For example, some tasks which were created using the App - Engine SDK use a custom domain name; custom domains are not - parsed by Cloud Tasks. 
If - [host][google.cloud.tasks.v2.AppEngineRouting.host] is not - parsable, then - [service][google.cloud.tasks.v2.AppEngineRouting.service], - [version][google.cloud.tasks.v2.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2.AppEngineRouting.instance] - are the empty string. - version (str): - App version. - - By default, the task is sent to the version which is the - default version when the task is attempted. - - For some queues or tasks which were created using the App - Engine Task Queue API, - [host][google.cloud.tasks.v2.AppEngineRouting.host] is not - parsable into - [service][google.cloud.tasks.v2.AppEngineRouting.service], - [version][google.cloud.tasks.v2.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. - For example, some tasks which were created using the App - Engine SDK use a custom domain name; custom domains are not - parsed by Cloud Tasks. If - [host][google.cloud.tasks.v2.AppEngineRouting.host] is not - parsable, then - [service][google.cloud.tasks.v2.AppEngineRouting.service], - [version][google.cloud.tasks.v2.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2.AppEngineRouting.instance] - are the empty string. - instance (str): - App instance. - - By default, the task is sent to an instance which is - available when the task is attempted. - - Requests can only be sent to a specific instance if `manual - scaling is used in App Engine - Standard `__. - App Engine Flex does not support instances. For more - information, see `App Engine Standard request - routing `__ - and `App Engine Flex request - routing `__. - host (str): - Output only. The host that the task is sent to. 
- - The host is constructed from the domain name of the app - associated with the queue's project ID (for example - .appspot.com), and the - [service][google.cloud.tasks.v2.AppEngineRouting.service], - [version][google.cloud.tasks.v2.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. - Tasks which were created using the App Engine SDK might have - a custom domain name. - - For more information, see `How Requests are - Routed `__. - """ - - service = proto.Field( - proto.STRING, - number=1, - ) - version = proto.Field( - proto.STRING, - number=2, - ) - instance = proto.Field( - proto.STRING, - number=3, - ) - host = proto.Field( - proto.STRING, - number=4, - ) - - -class OAuthToken(proto.Message): - r"""Contains information needed for generating an `OAuth - token `__. - This type of authorization should generally only be used when - calling Google APIs hosted on \*.googleapis.com. - - Attributes: - service_account_email (str): - `Service account - email `__ - to be used for generating OAuth token. The service account - must be within the same project as the queue. The caller - must have iam.serviceAccounts.actAs permission for the - service account. - scope (str): - OAuth scope to be used for generating OAuth - access token. If not specified, - "https://www.googleapis.com/auth/cloud-platform" - will be used. - """ - - service_account_email = proto.Field( - proto.STRING, - number=1, - ) - scope = proto.Field( - proto.STRING, - number=2, - ) - - -class OidcToken(proto.Message): - r"""Contains information needed for generating an `OpenID Connect - token `__. - This type of authorization can be used for many scenarios, including - calling Cloud Run, or endpoints where you intend to validate the - token yourself. - - Attributes: - service_account_email (str): - `Service account - email `__ - to be used for generating OIDC token. The service account - must be within the same project as the queue. 
The caller - must have iam.serviceAccounts.actAs permission for the - service account. - audience (str): - Audience to be used when generating OIDC - token. If not specified, the URI specified in - target will be used. - """ - - service_account_email = proto.Field( - proto.STRING, - number=1, - ) - audience = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/tasks_v2/types/task.py b/owl-bot-staging/v2/google/cloud/tasks_v2/types/task.py deleted file mode 100644 index 69c7c3ad..00000000 --- a/owl-bot-staging/v2/google/cloud/tasks_v2/types/task.py +++ /dev/null @@ -1,262 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.tasks_v2.types import target -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.tasks.v2', - manifest={ - 'Task', - 'Attempt', - }, -) - - -class Task(proto.Message): - r"""A unit of scheduled work. - Attributes: - name (str): - Optionally caller-specified in - [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. - - The task name. 
- - The task name must have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), colons (:), or periods (.). For - more information, see `Identifying - projects `__ - - ``LOCATION_ID`` is the canonical ID for the task's - location. The list of available locations can be obtained - by calling - [ListLocations][google.cloud.location.Locations.ListLocations]. - For more information, see - https://cloud.google.com/about/locations/. - - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), or hyphens (-). The maximum length is 100 - characters. - - ``TASK_ID`` can contain only letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), or underscores (_). The maximum - length is 500 characters. - app_engine_http_request (google.cloud.tasks_v2.types.AppEngineHttpRequest): - HTTP request that is sent to the App Engine app handler. - - An App Engine task is a task that has - [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest] - set. - http_request (google.cloud.tasks_v2.types.HttpRequest): - HTTP request that is sent to the worker. - - An HTTP task is a task that has - [HttpRequest][google.cloud.tasks.v2.HttpRequest] set. - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the task is scheduled to be attempted or - retried. - - ``schedule_time`` will be truncated to the nearest - microsecond. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time that the task was created. - - ``create_time`` will be truncated to the nearest second. - dispatch_deadline (google.protobuf.duration_pb2.Duration): - The deadline for requests sent to the worker. If the worker - does not respond by this deadline then the request is - cancelled and the attempt is marked as a - ``DEADLINE_EXCEEDED`` failure. 
Cloud Tasks will retry the - task according to the - [RetryConfig][google.cloud.tasks.v2.RetryConfig]. - - Note that when the request is cancelled, Cloud Tasks will - stop listing for the response, but whether the worker stops - processing depends on the worker. For example, if the worker - is stuck, it may not react to cancelled requests. - - The default and maximum values depend on the type of - request: - - - For [HTTP tasks][google.cloud.tasks.v2.HttpRequest], the - default is 10 minutes. The deadline must be in the - interval [15 seconds, 30 minutes]. - - - For [App Engine - tasks][google.cloud.tasks.v2.AppEngineHttpRequest], 0 - indicates that the request has the default deadline. The - default deadline depends on the `scaling - type `__ - of the service: 10 minutes for standard apps with - automatic scaling, 24 hours for standard apps with manual - and basic scaling, and 60 minutes for flex apps. If the - request deadline is set, it must be in the interval [15 - seconds, 24 hours 15 seconds]. Regardless of the task's - ``dispatch_deadline``, the app handler will not run for - longer than than the service's timeout. We recommend - setting the ``dispatch_deadline`` to at most a few - seconds more than the app handler's timeout. For more - information see - `Timeouts `__. - - ``dispatch_deadline`` will be truncated to the nearest - millisecond. The deadline is an approximate deadline. - dispatch_count (int): - Output only. The number of attempts - dispatched. - This count includes attempts which have been - dispatched but haven't received a response. - response_count (int): - Output only. The number of attempts which - have received a response. - first_attempt (google.cloud.tasks_v2.types.Attempt): - Output only. The status of the task's first attempt. - - Only - [dispatch_time][google.cloud.tasks.v2.Attempt.dispatch_time] - will be set. The other - [Attempt][google.cloud.tasks.v2.Attempt] information is not - retained by Cloud Tasks. 
- last_attempt (google.cloud.tasks_v2.types.Attempt): - Output only. The status of the task's last - attempt. - view (google.cloud.tasks_v2.types.Task.View): - Output only. The view specifies which subset of the - [Task][google.cloud.tasks.v2.Task] has been returned. - """ - class View(proto.Enum): - r"""The view specifies a subset of [Task][google.cloud.tasks.v2.Task] - data. - - When a task is returned in a response, not all information is - retrieved by default because some data, such as payloads, might be - desirable to return only when needed because of its large size or - because of the sensitivity of data that it contains. - """ - VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 - - name = proto.Field( - proto.STRING, - number=1, - ) - app_engine_http_request = proto.Field( - proto.MESSAGE, - number=2, - oneof='message_type', - message=target.AppEngineHttpRequest, - ) - http_request = proto.Field( - proto.MESSAGE, - number=3, - oneof='message_type', - message=target.HttpRequest, - ) - schedule_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - create_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - dispatch_deadline = proto.Field( - proto.MESSAGE, - number=6, - message=duration_pb2.Duration, - ) - dispatch_count = proto.Field( - proto.INT32, - number=7, - ) - response_count = proto.Field( - proto.INT32, - number=8, - ) - first_attempt = proto.Field( - proto.MESSAGE, - number=9, - message='Attempt', - ) - last_attempt = proto.Field( - proto.MESSAGE, - number=10, - message='Attempt', - ) - view = proto.Field( - proto.ENUM, - number=11, - enum=View, - ) - - -class Attempt(proto.Message): - r"""The status of a task attempt. - Attributes: - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time that this attempt was scheduled. - - ``schedule_time`` will be truncated to the nearest - microsecond. 
- dispatch_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time that this attempt was dispatched. - - ``dispatch_time`` will be truncated to the nearest - microsecond. - response_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time that this attempt response was - received. - - ``response_time`` will be truncated to the nearest - microsecond. - response_status (google.rpc.status_pb2.Status): - Output only. The response from the worker for this attempt. - - If ``response_time`` is unset, then the task has not been - attempted or is currently running and the - ``response_status`` field is meaningless. - """ - - schedule_time = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - dispatch_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - response_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - response_status = proto.Field( - proto.MESSAGE, - number=4, - message=status_pb2.Status, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini deleted file mode 100644 index 4505b485..00000000 --- a/owl-bot-staging/v2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py deleted file mode 100644 index e9ae5e3c..00000000 --- a/owl-bot-staging/v2/noxfile.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", -] - -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/tasks_v2/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python='3.7') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=['3.6', '3.7']) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python='3.6') -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/owl-bot-staging/v2/scripts/fixup_tasks_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_tasks_v2_keywords.py deleted file mode 100644 index fdfc7b38..00000000 --- a/owl-bot-staging/v2/scripts/fixup_tasks_v2_keywords.py +++ /dev/null @@ -1,191 +0,0 @@ -#! 
/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class tasksCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_queue': ('parent', 'queue', ), - 'create_task': ('parent', 'task', 'response_view', ), - 'delete_queue': ('name', ), - 'delete_task': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_queue': ('name', ), - 'get_task': ('name', 'response_view', ), - 'list_queues': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_tasks': ('parent', 'response_view', 'page_size', 'page_token', ), - 'pause_queue': ('name', ), - 'purge_queue': ('name', ), - 'resume_queue': ('name', ), - 'run_task': ('name', 'response_view', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_queue': ('queue', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: 
cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=tasksCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. 
- tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the tasks client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py deleted file mode 100644 index 2254eec1..00000000 --- a/owl-bot-staging/v2/setup.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-tasks', - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', - 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2/tests/unit/gapic/tasks_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/tasks_v2/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/tasks_v2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/tasks_v2/test_cloud_tasks.py b/owl-bot-staging/v2/tests/unit/gapic/tasks_v2/test_cloud_tasks.py deleted file mode 100644 index c2ea1d26..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/tasks_v2/test_cloud_tasks.py +++ /dev/null @@ -1,5181 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksAsyncClient -from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksClient -from google.cloud.tasks_v2.services.cloud_tasks import pagers -from google.cloud.tasks_v2.services.cloud_tasks import transports -from google.cloud.tasks_v2.services.cloud_tasks.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.tasks_v2.types import cloudtasks -from google.cloud.tasks_v2.types import queue -from google.cloud.tasks_v2.types import queue as gct_queue -from google.cloud.tasks_v2.types import target -from google.cloud.tasks_v2.types import task -from google.cloud.tasks_v2.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" 
tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert CloudTasksClient._get_default_mtls_endpoint(None) is None - assert CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - CloudTasksClient, - CloudTasksAsyncClient, -]) -def test_cloud_tasks_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - 
factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'cloudtasks.googleapis.com:443' - - -@pytest.mark.parametrize("client_class", [ - CloudTasksClient, - CloudTasksAsyncClient, -]) -def test_cloud_tasks_client_service_account_always_use_jwt(client_class): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_called_with(True) - - -@pytest.mark.parametrize("client_class", [ - CloudTasksClient, - CloudTasksAsyncClient, -]) -def test_cloud_tasks_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'cloudtasks.googleapis.com:443' - - -def test_cloud_tasks_client_get_transport_class(): - transport = CloudTasksClient.get_transport_class() - available_transports = [ - transports.CloudTasksGrpcTransport, - ] - assert transport in available_transports - - transport = CloudTasksClient.get_transport_class("grpc") - assert transport == transports.CloudTasksGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), - (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), -]) 
-@mock.patch.object(CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)) -@mock.patch.object(CloudTasksAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksAsyncClient)) -def test_cloud_tasks_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(CloudTasksClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CloudTasksClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "true"), - (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "false"), - (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)) 
-@mock.patch.object(CloudTasksAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_cloud_tasks_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), - (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_cloud_tasks_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), - (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_cloud_tasks_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - -def test_cloud_tasks_client_client_options_from_dict(): - with mock.patch('google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = CloudTasksClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - -def test_list_queues(transport: str = 'grpc', request_type=cloudtasks.ListQueuesRequest): - client = 
CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListQueuesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_queues(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListQueuesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListQueuesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_queues_from_dict(): - test_list_queues(request_type=dict) - - -def test_list_queues_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - client.list_queues() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListQueuesRequest() - - -@pytest.mark.asyncio -async def test_list_queues_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ListQueuesRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_queues(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListQueuesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListQueuesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_queues_async_from_dict(): - await test_list_queues_async(request_type=dict) - - -def test_list_queues_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ListQueuesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - call.return_value = cloudtasks.ListQueuesResponse() - client.list_queues(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_queues_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ListQueuesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse()) - await client.list_queues(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_queues_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListQueuesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_queues( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_queues_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_queues( - cloudtasks.ListQueuesRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_queues_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListQueuesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_queues( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_queues_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_queues( - cloudtasks.ListQueuesRequest(), - parent='parent_value', - ) - - -def test_list_queues_pager(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - queue.Queue(), - ], - next_page_token='abc', - ), - cloudtasks.ListQueuesResponse( - queues=[], - next_page_token='def', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - ], - next_page_token='ghi', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_queues(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, queue.Queue) - for i in results) - -def test_list_queues_pages(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - queue.Queue(), - ], - next_page_token='abc', - ), - cloudtasks.ListQueuesResponse( - queues=[], - next_page_token='def', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - ], - next_page_token='ghi', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - ], - ), - RuntimeError, - ) - pages = list(client.list_queues(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_queues_async_pager(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - queue.Queue(), - ], - next_page_token='abc', - ), - cloudtasks.ListQueuesResponse( - queues=[], - next_page_token='def', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - ], - next_page_token='ghi', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_queues(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, queue.Queue) - for i in responses) - -@pytest.mark.asyncio -async def test_list_queues_async_pages(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_queues), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - queue.Queue(), - ], - next_page_token='abc', - ), - cloudtasks.ListQueuesResponse( - queues=[], - next_page_token='def', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - ], - next_page_token='ghi', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_queues(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_queue(transport: str = 'grpc', request_type=cloudtasks.GetQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - ) - response = client.get_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetQueueRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -def test_get_queue_from_dict(): - test_get_queue(request_type=dict) - - -def test_get_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - client.get_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetQueueRequest() - - -@pytest.mark.asyncio -async def test_get_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.GetQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - )) - response = await client.get_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetQueueRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -@pytest.mark.asyncio -async def test_get_queue_async_from_dict(): - await test_get_queue_async(request_type=dict) - - -def test_get_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.GetQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - call.return_value = queue.Queue() - client.get_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.GetQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - await client.get_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_queue( - cloudtasks.GetQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_queue( - cloudtasks.GetQueueRequest(), - name='name_value', - ) - - -def test_create_queue(transport: str = 'grpc', request_type=cloudtasks.CreateQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue( - name='name_value', - state=gct_queue.Queue.State.RUNNING, - ) - response = client.create_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gct_queue.Queue) - assert response.name == 'name_value' - assert response.state == gct_queue.Queue.State.RUNNING - - -def test_create_queue_from_dict(): - test_create_queue(request_type=dict) - - -def test_create_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - client.create_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateQueueRequest() - - -@pytest.mark.asyncio -async def test_create_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CreateQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue( - name='name_value', - state=gct_queue.Queue.State.RUNNING, - )) - response = await client.create_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gct_queue.Queue) - assert response.name == 'name_value' - assert response.state == gct_queue.Queue.State.RUNNING - - -@pytest.mark.asyncio -async def test_create_queue_async_from_dict(): - await test_create_queue_async(request_type=dict) - - -def test_create_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudtasks.CreateQueueRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - call.return_value = gct_queue.Queue() - client.create_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.CreateQueueRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) - await client.create_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = gct_queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_queue( - parent='parent_value', - queue=gct_queue.Queue(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].queue == gct_queue.Queue(name='name_value') - - -def test_create_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_queue( - cloudtasks.CreateQueueRequest(), - parent='parent_value', - queue=gct_queue.Queue(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_queue( - parent='parent_value', - queue=gct_queue.Queue(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].queue == gct_queue.Queue(name='name_value') - - -@pytest.mark.asyncio -async def test_create_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_queue( - cloudtasks.CreateQueueRequest(), - parent='parent_value', - queue=gct_queue.Queue(name='name_value'), - ) - - -def test_update_queue(transport: str = 'grpc', request_type=cloudtasks.UpdateQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue( - name='name_value', - state=gct_queue.Queue.State.RUNNING, - ) - response = client.update_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.UpdateQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gct_queue.Queue) - assert response.name == 'name_value' - assert response.state == gct_queue.Queue.State.RUNNING - - -def test_update_queue_from_dict(): - test_update_queue(request_type=dict) - - -def test_update_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - client.update_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.UpdateQueueRequest() - - -@pytest.mark.asyncio -async def test_update_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.UpdateQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue( - name='name_value', - state=gct_queue.Queue.State.RUNNING, - )) - response = await client.update_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.UpdateQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gct_queue.Queue) - assert response.name == 'name_value' - assert response.state == gct_queue.Queue.State.RUNNING - - -@pytest.mark.asyncio -async def test_update_queue_async_from_dict(): - await test_update_queue_async(request_type=dict) - - -def test_update_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudtasks.UpdateQueueRequest() - - request.queue.name = 'queue.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - call.return_value = gct_queue.Queue() - client.update_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'queue.name=queue.name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.UpdateQueueRequest() - - request.queue.name = 'queue.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) - await client.update_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'queue.name=queue.name/value', - ) in kw['metadata'] - - -def test_update_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = gct_queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_queue( - queue=gct_queue.Queue(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].queue == gct_queue.Queue(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -def test_update_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_queue( - cloudtasks.UpdateQueueRequest(), - queue=gct_queue.Queue(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.asyncio -async def test_update_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_queue( - queue=gct_queue.Queue(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].queue == gct_queue.Queue(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -@pytest.mark.asyncio -async def test_update_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_queue( - cloudtasks.UpdateQueueRequest(), - queue=gct_queue.Queue(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_queue(transport: str = 'grpc', request_type=cloudtasks.DeleteQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteQueueRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_queue_from_dict(): - test_delete_queue(request_type=dict) - - -def test_delete_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - client.delete_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteQueueRequest() - - -@pytest.mark.asyncio -async def test_delete_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.DeleteQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteQueueRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_queue_async_from_dict(): - await test_delete_queue_async(request_type=dict) - - -def test_delete_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.DeleteQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - call.return_value = None - client.delete_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.DeleteQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_queue( - cloudtasks.DeleteQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_queue( - cloudtasks.DeleteQueueRequest(), - name='name_value', - ) - - -def test_purge_queue(transport: str = 'grpc', request_type=cloudtasks.PurgeQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - ) - response = client.purge_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PurgeQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -def test_purge_queue_from_dict(): - test_purge_queue(request_type=dict) - - -def test_purge_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - client.purge_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PurgeQueueRequest() - - -@pytest.mark.asyncio -async def test_purge_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.PurgeQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - )) - response = await client.purge_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PurgeQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -@pytest.mark.asyncio -async def test_purge_queue_async_from_dict(): - await test_purge_queue_async(request_type=dict) - - -def test_purge_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.PurgeQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - call.return_value = queue.Queue() - client.purge_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_purge_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.PurgeQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - await client.purge_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_purge_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.purge_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_purge_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.purge_queue( - cloudtasks.PurgeQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_purge_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.purge_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_purge_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.purge_queue( - cloudtasks.PurgeQueueRequest(), - name='name_value', - ) - - -def test_pause_queue(transport: str = 'grpc', request_type=cloudtasks.PauseQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - ) - response = client.pause_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PauseQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -def test_pause_queue_from_dict(): - test_pause_queue(request_type=dict) - - -def test_pause_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - client.pause_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PauseQueueRequest() - - -@pytest.mark.asyncio -async def test_pause_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.PauseQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - )) - response = await client.pause_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PauseQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -@pytest.mark.asyncio -async def test_pause_queue_async_from_dict(): - await test_pause_queue_async(request_type=dict) - - -def test_pause_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.PauseQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - call.return_value = queue.Queue() - client.pause_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_pause_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.PauseQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - await client.pause_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_pause_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.pause_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_pause_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.pause_queue( - cloudtasks.PauseQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_pause_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.pause_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_pause_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.pause_queue( - cloudtasks.PauseQueueRequest(), - name='name_value', - ) - - -def test_resume_queue(transport: str = 'grpc', request_type=cloudtasks.ResumeQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - ) - response = client.resume_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ResumeQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -def test_resume_queue_from_dict(): - test_resume_queue(request_type=dict) - - -def test_resume_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - client.resume_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ResumeQueueRequest() - - -@pytest.mark.asyncio -async def test_resume_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ResumeQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - )) - response = await client.resume_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ResumeQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -@pytest.mark.asyncio -async def test_resume_queue_async_from_dict(): - await test_resume_queue_async(request_type=dict) - - -def test_resume_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ResumeQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - call.return_value = queue.Queue() - client.resume_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_resume_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ResumeQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - await client.resume_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_resume_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.resume_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_resume_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.resume_queue( - cloudtasks.ResumeQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_resume_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.resume_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_resume_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.resume_queue( - cloudtasks.ResumeQueueRequest(), - name='name_value', - ) - - -def test_get_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_from_dict(): - test_get_iam_policy(request_type=dict) - - -def test_get_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) - - -def test_get_iam_policy_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -def test_get_iam_policy_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_set_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_from_dict(): - test_set_iam_policy(request_type=dict) - - -def test_set_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - - -def test_set_iam_policy_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -def test_set_iam_policy_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_test_iam_permissions(transport: str = 'grpc', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_from_dict(): - test_test_iam_permissions(request_type=dict) - - -def test_test_iam_permissions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) - - -def test_test_iam_permissions_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_test_iam_permissions_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - assert args[0].permissions == ['permissions_value'] - - -def test_test_iam_permissions_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -@pytest.mark.asyncio -async def test_test_iam_permissions_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - assert args[0].permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -def test_list_tasks(transport: str = 'grpc', request_type=cloudtasks.ListTasksRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListTasksResponse( - next_page_token='next_page_token_value', - ) - response = client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListTasksRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTasksPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_tasks_from_dict(): - test_list_tasks(request_type=dict) - - -def test_list_tasks_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - client.list_tasks() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListTasksRequest() - - -@pytest.mark.asyncio -async def test_list_tasks_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ListTasksRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListTasksRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTasksAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_tasks_async_from_dict(): - await test_list_tasks_async(request_type=dict) - - -def test_list_tasks_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ListTasksRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = cloudtasks.ListTasksResponse() - client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_tasks_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ListTasksRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse()) - await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_tasks_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListTasksResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_tasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_tasks_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_tasks( - cloudtasks.ListTasksRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_tasks_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListTasksResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_tasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_tasks_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_tasks( - cloudtasks.ListTasksRequest(), - parent='parent_value', - ) - - -def test_list_tasks_pager(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - cloudtasks.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_tasks(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, task.Task) - for i in results) - -def test_list_tasks_pages(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - cloudtasks.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tasks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_tasks_async_pager(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - cloudtasks.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tasks(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, task.Task) - for i in responses) - -@pytest.mark.asyncio -async def test_list_tasks_async_pages(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - cloudtasks.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_tasks(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_task(transport: str = 'grpc', request_type=cloudtasks.GetTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task( - name='name_value', - dispatch_count=1496, - response_count=1527, - view=task.Task.View.BASIC, - app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), - ) - response = client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetTaskRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.dispatch_count == 1496 - assert response.response_count == 1527 - assert response.view == task.Task.View.BASIC - - -def test_get_task_from_dict(): - test_get_task(request_type=dict) - - -def test_get_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - client.get_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetTaskRequest() - - -@pytest.mark.asyncio -async def test_get_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.GetTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( - name='name_value', - dispatch_count=1496, - response_count=1527, - view=task.Task.View.BASIC, - )) - response = await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetTaskRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.dispatch_count == 1496 - assert response.response_count == 1527 - assert response.view == task.Task.View.BASIC - - -@pytest.mark.asyncio -async def test_get_task_async_from_dict(): - await test_get_task_async(request_type=dict) - - -def test_get_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.GetTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = task.Task() - client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.GetTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_task( - cloudtasks.GetTaskRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_task( - cloudtasks.GetTaskRequest(), - name='name_value', - ) - - -def test_create_task(transport: str = 'grpc', request_type=cloudtasks.CreateTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_task.Task( - name='name_value', - dispatch_count=1496, - response_count=1527, - view=gct_task.Task.View.BASIC, - app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), - ) - response = client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateTaskRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gct_task.Task) - assert response.name == 'name_value' - assert response.dispatch_count == 1496 - assert response.response_count == 1527 - assert response.view == gct_task.Task.View.BASIC - - -def test_create_task_from_dict(): - test_create_task(request_type=dict) - - -def test_create_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - client.create_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateTaskRequest() - - -@pytest.mark.asyncio -async def test_create_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CreateTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task( - name='name_value', - dispatch_count=1496, - response_count=1527, - view=gct_task.Task.View.BASIC, - )) - response = await client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateTaskRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gct_task.Task) - assert response.name == 'name_value' - assert response.dispatch_count == 1496 - assert response.response_count == 1527 - assert response.view == gct_task.Task.View.BASIC - - -@pytest.mark.asyncio -async def test_create_task_async_from_dict(): - await test_create_task_async(request_type=dict) - - -def test_create_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.CreateTaskRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value = gct_task.Task() - client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.CreateTaskRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) - await client.create_task(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_task( - parent='parent_value', - task=gct_task.Task(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].task == gct_task.Task(name='name_value') - - -def test_create_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_task( - cloudtasks.CreateTaskRequest(), - parent='parent_value', - task=gct_task.Task(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = gct_task.Task() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_task( - parent='parent_value', - task=gct_task.Task(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].task == gct_task.Task(name='name_value') - - -@pytest.mark.asyncio -async def test_create_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_task( - cloudtasks.CreateTaskRequest(), - parent='parent_value', - task=gct_task.Task(name='name_value'), - ) - - -def test_delete_task(transport: str = 'grpc', request_type=cloudtasks.DeleteTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteTaskRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_task_from_dict(): - test_delete_task(request_type=dict) - - -def test_delete_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - client.delete_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteTaskRequest() - - -@pytest.mark.asyncio -async def test_delete_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.DeleteTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteTaskRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_delete_task_async_from_dict(): - await test_delete_task_async(request_type=dict) - - -def test_delete_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.DeleteTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value = None - client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.DeleteTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_task( - cloudtasks.DeleteTaskRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_task( - cloudtasks.DeleteTaskRequest(), - name='name_value', - ) - - -def test_run_task(transport: str = 'grpc', request_type=cloudtasks.RunTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task( - name='name_value', - dispatch_count=1496, - response_count=1527, - view=task.Task.View.BASIC, - app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), - ) - response = client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.RunTaskRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.dispatch_count == 1496 - assert response.response_count == 1527 - assert response.view == task.Task.View.BASIC - - -def test_run_task_from_dict(): - test_run_task(request_type=dict) - - -def test_run_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - client.run_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.RunTaskRequest() - - -@pytest.mark.asyncio -async def test_run_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.RunTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( - name='name_value', - dispatch_count=1496, - response_count=1527, - view=task.Task.View.BASIC, - )) - response = await client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.RunTaskRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.dispatch_count == 1496 - assert response.response_count == 1527 - assert response.view == task.Task.View.BASIC - - -@pytest.mark.asyncio -async def test_run_task_async_from_dict(): - await test_run_task_async(request_type=dict) - - -def test_run_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.RunTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - call.return_value = task.Task() - client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_run_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.RunTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - await client.run_task(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_run_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.run_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_run_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.run_task( - cloudtasks.RunTaskRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_run_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.run_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_run_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.run_task( - cloudtasks.RunTaskRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudTasksClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudTasksClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CloudTasksClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.CloudTasksGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.CloudTasksGrpcTransport, - transports.CloudTasksGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CloudTasksGrpcTransport, - ) - -def test_cloud_tasks_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudTasksTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_cloud_tasks_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.CloudTasksTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_queues', - 'get_queue', - 'create_queue', - 'update_queue', - 'delete_queue', - 'purge_queue', - 'pause_queue', - 'resume_queue', - 'get_iam_policy', - 'set_iam_policy', - 'test_iam_permissions', - 'list_tasks', - 'get_task', - 'create_task', - 'delete_task', - 'run_task', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_cloud_tasks_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudTasksTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_cloud_tasks_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = 
(ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudTasksTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - quota_project_id="octopus", - ) - - -def test_cloud_tasks_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudTasksTransport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_cloud_tasks_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudTasksClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_cloud_tasks_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudTasksClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudTasksGrpcTransport, - transports.CloudTasksGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_cloud_tasks_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudTasksGrpcTransport, - transports.CloudTasksGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_cloud_tasks_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudTasksGrpcTransport, grpc_helpers), - (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "cloudtasks.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="cloudtasks.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) -def test_cloud_tasks_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_cloud_tasks_host_no_port(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudtasks.googleapis.com'), - ) - assert client.transport._host == 'cloudtasks.googleapis.com:443' - - -def test_cloud_tasks_host_with_port(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudtasks.googleapis.com:8000'), - ) - assert client.transport._host == 'cloudtasks.googleapis.com:8000' - -def test_cloud_tasks_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.CloudTasksGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_cloud_tasks_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudTasksGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) -def test_cloud_tasks_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - 
credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) -def test_cloud_tasks_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_queue_path(): - project = "squid" - location = "clam" - queue = "whelk" - expected = "projects/{project}/locations/{location}/queues/{queue}".format(project=project, location=location, queue=queue, ) - actual = 
CloudTasksClient.queue_path(project, location, queue) - assert expected == actual - - -def test_parse_queue_path(): - expected = { - "project": "octopus", - "location": "oyster", - "queue": "nudibranch", - } - path = CloudTasksClient.queue_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_queue_path(path) - assert expected == actual - -def test_task_path(): - project = "cuttlefish" - location = "mussel" - queue = "winkle" - task = "nautilus" - expected = "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(project=project, location=location, queue=queue, task=task, ) - actual = CloudTasksClient.task_path(project, location, queue, task) - assert expected == actual - - -def test_parse_task_path(): - expected = { - "project": "scallop", - "location": "abalone", - "queue": "squid", - "task": "clam", - } - path = CloudTasksClient.task_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_task_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = CloudTasksClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = CloudTasksClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudTasksClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = CloudTasksClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = CloudTasksClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = CloudTasksClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = CloudTasksClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = CloudTasksClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = CloudTasksClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudTasksClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = CloudTasksClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = CloudTasksClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.CloudTasksTransport, '_prep_wrapped_messages') as prep: - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.CloudTasksTransport, '_prep_wrapped_messages') as prep: - transport_class = CloudTasksClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v2beta2/.coveragerc b/owl-bot-staging/v2beta2/.coveragerc deleted file mode 100644 index 1d5bc53f..00000000 --- a/owl-bot-staging/v2beta2/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/tasks/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v2beta2/MANIFEST.in b/owl-bot-staging/v2beta2/MANIFEST.in deleted file mode 100644 index fa24e936..00000000 --- a/owl-bot-staging/v2beta2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/tasks *.py -recursive-include google/cloud/tasks_v2beta2 *.py diff --git a/owl-bot-staging/v2beta2/README.rst b/owl-bot-staging/v2beta2/README.rst deleted file mode 100644 index 6171a7e2..00000000 --- a/owl-bot-staging/v2beta2/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Tasks API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Tasks API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2beta2/docs/conf.py b/owl-bot-staging/v2beta2/docs/conf.py deleted file mode 100644 index 62c563cc..00000000 --- a/owl-bot-staging/v2beta2/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-tasks documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-tasks" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
-# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-tasks-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-tasks.tex", - u"google-cloud-tasks Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-tasks", - u"Google Cloud Tasks Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-tasks", - u"google-cloud-tasks Documentation", - author, - "google-cloud-tasks", - "GAPIC library for Google Cloud Tasks API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. 
-# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v2beta2/docs/index.rst b/owl-bot-staging/v2beta2/docs/index.rst deleted file mode 100644 index 56b75351..00000000 --- a/owl-bot-staging/v2beta2/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - tasks_v2beta2/services - tasks_v2beta2/types diff --git a/owl-bot-staging/v2beta2/docs/tasks_v2beta2/cloud_tasks.rst b/owl-bot-staging/v2beta2/docs/tasks_v2beta2/cloud_tasks.rst deleted file mode 100644 index eacf8fb7..00000000 --- a/owl-bot-staging/v2beta2/docs/tasks_v2beta2/cloud_tasks.rst +++ /dev/null @@ -1,10 +0,0 @@ -CloudTasks ----------------------------- - -.. automodule:: google.cloud.tasks_v2beta2.services.cloud_tasks - :members: - :inherited-members: - -.. 
automodule:: google.cloud.tasks_v2beta2.services.cloud_tasks.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v2beta2/docs/tasks_v2beta2/services.rst b/owl-bot-staging/v2beta2/docs/tasks_v2beta2/services.rst deleted file mode 100644 index 4273c20e..00000000 --- a/owl-bot-staging/v2beta2/docs/tasks_v2beta2/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Tasks v2beta2 API -=========================================== -.. toctree:: - :maxdepth: 2 - - cloud_tasks diff --git a/owl-bot-staging/v2beta2/docs/tasks_v2beta2/types.rst b/owl-bot-staging/v2beta2/docs/tasks_v2beta2/types.rst deleted file mode 100644 index 4a688d27..00000000 --- a/owl-bot-staging/v2beta2/docs/tasks_v2beta2/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Tasks v2beta2 API -======================================== - -.. automodule:: google.cloud.tasks_v2beta2.types - :members: - :undoc-members: - :show-inheritance: diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks/__init__.py b/owl-bot-staging/v2beta2/google/cloud/tasks/__init__.py deleted file mode 100644 index a26a8d59..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks/__init__.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.cloud.tasks_v2beta2.services.cloud_tasks.client import CloudTasksClient -from google.cloud.tasks_v2beta2.services.cloud_tasks.async_client import CloudTasksAsyncClient - -from google.cloud.tasks_v2beta2.types.cloudtasks import AcknowledgeTaskRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import CancelLeaseRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import CreateQueueRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import CreateTaskRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import DeleteQueueRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import DeleteTaskRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import GetQueueRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import GetTaskRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import LeaseTasksRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import LeaseTasksResponse -from google.cloud.tasks_v2beta2.types.cloudtasks import ListQueuesRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import ListQueuesResponse -from google.cloud.tasks_v2beta2.types.cloudtasks import ListTasksRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import ListTasksResponse -from google.cloud.tasks_v2beta2.types.cloudtasks import PauseQueueRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import PurgeQueueRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import RenewLeaseRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import ResumeQueueRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import RunTaskRequest -from google.cloud.tasks_v2beta2.types.cloudtasks import UpdateQueueRequest -from google.cloud.tasks_v2beta2.types.queue import Queue -from google.cloud.tasks_v2beta2.types.queue import QueueStats -from google.cloud.tasks_v2beta2.types.queue import RateLimits -from google.cloud.tasks_v2beta2.types.queue import RetryConfig -from google.cloud.tasks_v2beta2.types.target import 
AppEngineHttpRequest -from google.cloud.tasks_v2beta2.types.target import AppEngineHttpTarget -from google.cloud.tasks_v2beta2.types.target import AppEngineRouting -from google.cloud.tasks_v2beta2.types.target import PullMessage -from google.cloud.tasks_v2beta2.types.target import PullTarget -from google.cloud.tasks_v2beta2.types.target import HttpMethod -from google.cloud.tasks_v2beta2.types.task import AttemptStatus -from google.cloud.tasks_v2beta2.types.task import Task -from google.cloud.tasks_v2beta2.types.task import TaskStatus - -__all__ = ('CloudTasksClient', - 'CloudTasksAsyncClient', - 'AcknowledgeTaskRequest', - 'CancelLeaseRequest', - 'CreateQueueRequest', - 'CreateTaskRequest', - 'DeleteQueueRequest', - 'DeleteTaskRequest', - 'GetQueueRequest', - 'GetTaskRequest', - 'LeaseTasksRequest', - 'LeaseTasksResponse', - 'ListQueuesRequest', - 'ListQueuesResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'PauseQueueRequest', - 'PurgeQueueRequest', - 'RenewLeaseRequest', - 'ResumeQueueRequest', - 'RunTaskRequest', - 'UpdateQueueRequest', - 'Queue', - 'QueueStats', - 'RateLimits', - 'RetryConfig', - 'AppEngineHttpRequest', - 'AppEngineHttpTarget', - 'AppEngineRouting', - 'PullMessage', - 'PullTarget', - 'HttpMethod', - 'AttemptStatus', - 'Task', - 'TaskStatus', -) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks/py.typed b/owl-bot-staging/v2beta2/google/cloud/tasks/py.typed deleted file mode 100644 index 41f0b1b8..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-tasks package uses inline types. 
diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/__init__.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/__init__.py deleted file mode 100644 index 8fb2af2c..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/__init__.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from .services.cloud_tasks import CloudTasksClient -from .services.cloud_tasks import CloudTasksAsyncClient - -from .types.cloudtasks import AcknowledgeTaskRequest -from .types.cloudtasks import CancelLeaseRequest -from .types.cloudtasks import CreateQueueRequest -from .types.cloudtasks import CreateTaskRequest -from .types.cloudtasks import DeleteQueueRequest -from .types.cloudtasks import DeleteTaskRequest -from .types.cloudtasks import GetQueueRequest -from .types.cloudtasks import GetTaskRequest -from .types.cloudtasks import LeaseTasksRequest -from .types.cloudtasks import LeaseTasksResponse -from .types.cloudtasks import ListQueuesRequest -from .types.cloudtasks import ListQueuesResponse -from .types.cloudtasks import ListTasksRequest -from .types.cloudtasks import ListTasksResponse -from .types.cloudtasks import PauseQueueRequest -from .types.cloudtasks import PurgeQueueRequest -from .types.cloudtasks import RenewLeaseRequest -from .types.cloudtasks import ResumeQueueRequest -from .types.cloudtasks import RunTaskRequest -from .types.cloudtasks import UpdateQueueRequest 
-from .types.queue import Queue -from .types.queue import QueueStats -from .types.queue import RateLimits -from .types.queue import RetryConfig -from .types.target import AppEngineHttpRequest -from .types.target import AppEngineHttpTarget -from .types.target import AppEngineRouting -from .types.target import PullMessage -from .types.target import PullTarget -from .types.target import HttpMethod -from .types.task import AttemptStatus -from .types.task import Task -from .types.task import TaskStatus - -__all__ = ( - 'CloudTasksAsyncClient', -'AcknowledgeTaskRequest', -'AppEngineHttpRequest', -'AppEngineHttpTarget', -'AppEngineRouting', -'AttemptStatus', -'CancelLeaseRequest', -'CloudTasksClient', -'CreateQueueRequest', -'CreateTaskRequest', -'DeleteQueueRequest', -'DeleteTaskRequest', -'GetQueueRequest', -'GetTaskRequest', -'HttpMethod', -'LeaseTasksRequest', -'LeaseTasksResponse', -'ListQueuesRequest', -'ListQueuesResponse', -'ListTasksRequest', -'ListTasksResponse', -'PauseQueueRequest', -'PullMessage', -'PullTarget', -'PurgeQueueRequest', -'Queue', -'QueueStats', -'RateLimits', -'RenewLeaseRequest', -'ResumeQueueRequest', -'RetryConfig', -'RunTaskRequest', -'Task', -'TaskStatus', -'UpdateQueueRequest', -) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/gapic_metadata.json b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/gapic_metadata.json deleted file mode 100644 index 777cbb14..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/gapic_metadata.json +++ /dev/null @@ -1,223 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.tasks_v2beta2", - "protoPackage": "google.cloud.tasks.v2beta2", - "schema": "1.0", - "services": { - "CloudTasks": { - "clients": { - "grpc": { - "libraryClient": "CloudTasksClient", - "rpcs": { - "AcknowledgeTask": { - "methods": [ - "acknowledge_task" - ] - }, - "CancelLease": { - "methods": [ - 
"cancel_lease" - ] - }, - "CreateQueue": { - "methods": [ - "create_queue" - ] - }, - "CreateTask": { - "methods": [ - "create_task" - ] - }, - "DeleteQueue": { - "methods": [ - "delete_queue" - ] - }, - "DeleteTask": { - "methods": [ - "delete_task" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetQueue": { - "methods": [ - "get_queue" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "LeaseTasks": { - "methods": [ - "lease_tasks" - ] - }, - "ListQueues": { - "methods": [ - "list_queues" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "PauseQueue": { - "methods": [ - "pause_queue" - ] - }, - "PurgeQueue": { - "methods": [ - "purge_queue" - ] - }, - "RenewLease": { - "methods": [ - "renew_lease" - ] - }, - "ResumeQueue": { - "methods": [ - "resume_queue" - ] - }, - "RunTask": { - "methods": [ - "run_task" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateQueue": { - "methods": [ - "update_queue" - ] - } - } - }, - "grpc-async": { - "libraryClient": "CloudTasksAsyncClient", - "rpcs": { - "AcknowledgeTask": { - "methods": [ - "acknowledge_task" - ] - }, - "CancelLease": { - "methods": [ - "cancel_lease" - ] - }, - "CreateQueue": { - "methods": [ - "create_queue" - ] - }, - "CreateTask": { - "methods": [ - "create_task" - ] - }, - "DeleteQueue": { - "methods": [ - "delete_queue" - ] - }, - "DeleteTask": { - "methods": [ - "delete_task" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetQueue": { - "methods": [ - "get_queue" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "LeaseTasks": { - "methods": [ - "lease_tasks" - ] - }, - "ListQueues": { - "methods": [ - "list_queues" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "PauseQueue": { - "methods": [ - "pause_queue" - ] - }, - "PurgeQueue": { - "methods": [ - "purge_queue" - ] - }, - 
"RenewLease": { - "methods": [ - "renew_lease" - ] - }, - "ResumeQueue": { - "methods": [ - "resume_queue" - ] - }, - "RunTask": { - "methods": [ - "run_task" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateQueue": { - "methods": [ - "update_queue" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/py.typed b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/py.typed deleted file mode 100644 index 41f0b1b8..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-tasks package uses inline types. diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/__init__.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/__init__.py deleted file mode 100644 index 4de65971..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py deleted file mode 100644 index 1478acb5..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import CloudTasksClient -from .async_client import CloudTasksAsyncClient - -__all__ = ( - 'CloudTasksClient', - 'CloudTasksAsyncClient', -) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py deleted file mode 100644 index a544c2c9..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py +++ /dev/null @@ -1,2249 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.tasks_v2beta2.services.cloud_tasks import pagers -from google.cloud.tasks_v2beta2.types import cloudtasks -from google.cloud.tasks_v2beta2.types import queue -from google.cloud.tasks_v2beta2.types import queue as gct_queue -from google.cloud.tasks_v2beta2.types import target -from google.cloud.tasks_v2beta2.types import task -from google.cloud.tasks_v2beta2.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport -from .client import CloudTasksClient - - -class CloudTasksAsyncClient: - """Cloud Tasks allows developers to manage the execution of - background work in their applications. 
- """ - - _client: CloudTasksClient - - DEFAULT_ENDPOINT = CloudTasksClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = CloudTasksClient.DEFAULT_MTLS_ENDPOINT - - queue_path = staticmethod(CloudTasksClient.queue_path) - parse_queue_path = staticmethod(CloudTasksClient.parse_queue_path) - task_path = staticmethod(CloudTasksClient.task_path) - parse_task_path = staticmethod(CloudTasksClient.parse_task_path) - common_billing_account_path = staticmethod(CloudTasksClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(CloudTasksClient.parse_common_billing_account_path) - common_folder_path = staticmethod(CloudTasksClient.common_folder_path) - parse_common_folder_path = staticmethod(CloudTasksClient.parse_common_folder_path) - common_organization_path = staticmethod(CloudTasksClient.common_organization_path) - parse_common_organization_path = staticmethod(CloudTasksClient.parse_common_organization_path) - common_project_path = staticmethod(CloudTasksClient.common_project_path) - parse_common_project_path = staticmethod(CloudTasksClient.parse_common_project_path) - common_location_path = staticmethod(CloudTasksClient.common_location_path) - parse_common_location_path = staticmethod(CloudTasksClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksAsyncClient: The constructed client. - """ - return CloudTasksClient.from_service_account_info.__func__(CloudTasksAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. 
- - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksAsyncClient: The constructed client. - """ - return CloudTasksClient.from_service_account_file.__func__(CloudTasksAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CloudTasksTransport: - """Returns the transport used by the client instance. - - Returns: - CloudTasksTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(CloudTasksClient).get_transport_class, type(CloudTasksClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, CloudTasksTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud tasks client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.CloudTasksTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = CloudTasksClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_queues(self, - request: cloudtasks.ListQueuesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListQueuesAsyncPager: - r"""Lists queues. - Queues are returned in lexicographical order. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.ListQueuesRequest`): - The request object. Request message for - [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. - parent (:class:`str`): - Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.services.cloud_tasks.pagers.ListQueuesAsyncPager: - Response message for - [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.ListQueuesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_queues, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListQueuesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_queue(self, - request: cloudtasks.GetQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Gets a queue. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.GetQueueRequest`): - The request object. Request message for - [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. - name (:class:`str`): - Required. The resource name of the queue. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, target types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.GetQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_queue, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_queue(self, - request: cloudtasks.CreateQueueRequest = None, - *, - parent: str = None, - queue: gct_queue.Queue = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_queue.Queue: - r"""Creates a queue. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.CreateQueueRequest`): - The request object. Request message for - [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. - parent (:class:`str`): - Required. The location name in which the queue will be - created. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - The list of allowed locations can be obtained by calling - Cloud Tasks' implementation of - [ListLocations][google.cloud.location.Locations.ListLocations]. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - queue (:class:`google.cloud.tasks_v2beta2.types.Queue`): - Required. The queue to create. - - [Queue's name][google.cloud.tasks.v2beta2.Queue.name] - cannot be the same as an existing queue. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, target types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, queue]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.CreateQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if queue is not None: - request.queue = queue - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_queue, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_queue(self, - request: cloudtasks.UpdateQueueRequest = None, - *, - queue: gct_queue.Queue = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_queue.Queue: - r"""Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.UpdateQueueRequest`): - The request object. Request message for - [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue]. - queue (:class:`google.cloud.tasks_v2beta2.types.Queue`): - Required. The queue to create or update. - - The queue's - [name][google.cloud.tasks.v2beta2.Queue.name] must be - specified. - - Output only fields cannot be modified using UpdateQueue. - Any value specified for an output only field will be - ignored. The queue's - [name][google.cloud.tasks.v2beta2.Queue.name] cannot be - changed. 
- - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - A mask used to specify which fields - of the queue are being updated. - If empty, then all fields will be - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, target types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([queue, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.UpdateQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if queue is not None: - request.queue = queue - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_queue, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("queue.name", request.queue.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_queue(self, - request: cloudtasks.DeleteQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be - created for 7 days. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.DeleteQueueRequest`): - The request object. Request message for - [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. - name (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.DeleteQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_queue, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def purge_queue(self, - request: cloudtasks.PurgeQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Purges a queue by deleting all of its tasks. - All tasks created before this method is called are - permanently deleted. - Purge operations can take up to one minute to take - effect. Tasks might be dispatched before the purge takes - effect. A purge is irreversible. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.PurgeQueueRequest`): - The request object. Request message for - [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. - name (:class:`str`): - Required. The queue name. 
For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, target types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.PurgeQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.purge_queue, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def pause_queue(self, - request: cloudtasks.PauseQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. - Tasks can still be added when the queue is paused. A queue is - paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.PauseQueueRequest`): - The request object. Request message for - [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue]. - name (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, target types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.PauseQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.pause_queue, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def resume_queue(self, - request: cloudtasks.ResumeQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The - state of a queue is stored in the queue's - [state][google.cloud.tasks.v2beta2.Queue.state]; after calling - this method it will be set to - [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can lead - to target overloading. If you are resuming high-QPS queues, - follow the 500/50/5 pattern described in `Managing Cloud Tasks - Scaling - Risks `__. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.ResumeQueueRequest`): - The request object. 
Request message for - [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. - name (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, target types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.ResumeQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.resume_queue, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: iam_policy_pb2.GetIamPolicyRequest = None, - *, - resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a - [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Args: - request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). 
- A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. - - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: iam_policy_pb2.SetIamPolicyRequest = None, - *, - resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy for a - [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM - permissions yet. Project-level permissions are required to use - the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Args: - request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being specified. 
See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). - A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. 
- - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, - *, - resource: str = None, - permissions: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that a caller has on a - [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does - not exist, this will return an empty set of permissions, not a - [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Args: - request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (:class:`Sequence[str]`): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. 
- - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_tasks(self, - request: cloudtasks.ListTasksRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTasksAsyncPager: - r"""Lists the tasks in a queue. - - By default, only the - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is - retrieved due to performance considerations; - [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] - controls the subset of information which is returned. - - The tasks may be returned in any order. The ordering may change - at any time. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.ListTasksRequest`): - The request object. Request message for listing tasks - using - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - parent (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.services.cloud_tasks.pagers.ListTasksAsyncPager: - Response message for listing tasks using - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.ListTasksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_tasks, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTasksAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_task(self, - request: cloudtasks.GetTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Gets a task. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.GetTaskRequest`): - The request object. Request message for getting a task - using - [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. - name (:class:`str`): - Required. The task name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.GetTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_task, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def create_task(self, - request: cloudtasks.CreateTaskRequest = None, - *, - parent: str = None, - task: gct_task.Task = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_task.Task: - r"""Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask - command. - - - For [App Engine - queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the - maximum task size is 100KB. - - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the - maximum task size is 1MB. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.CreateTaskRequest`): - The request object. Request message for - [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. - parent (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - The queue must already exist. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task (:class:`google.cloud.tasks_v2beta2.types.Task`): - Required. The task to add. - - Task names have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. - The user can optionally specify a task - [name][google.cloud.tasks.v2beta2.Task.name]. If a name - is not specified then the system will generate a random - unique task id, which will be set in the task returned - in the [response][google.cloud.tasks.v2beta2.Task.name]. - - If - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - is not set or is in the past then Cloud Tasks will set - it to the current time. - - Task De-duplication: - - Explicitly specifying a task ID enables task - de-duplication. 
If a task's ID is identical to that of - an existing task or a task that was deleted or completed - recently then the call will fail with - [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the - task's queue was created using Cloud Tasks, then another - task with the same name can't be created for ~1hour - after the original task was deleted or completed. If the - task's queue was created using queue.yaml or queue.xml, - then another task with the same name can't be created - for ~9days after the original task was deleted or - completed. - - Because there is an extra lookup cost to identify - duplicate task names, these - [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] - calls have significantly increased latency. Using hashed - strings for the task id or for the prefix of the task id - is recommended. Choosing task ids that are sequential or - have sequential prefixes, for example using a timestamp, - causes an increase in latency and error rates in all - task commands. The infrastructure relies on an - approximately uniform distribution of task ids to store - and serve tasks efficiently. - - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, task]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.CreateTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if task is not None: - request.task = task - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_task, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_task(self, - request: cloudtasks.DeleteTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a task. - A task can be deleted if it is scheduled or dispatched. - A task cannot be deleted if it has completed - successfully or permanently failed. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.DeleteTaskRequest`): - The request object. Request message for deleting a task - using - [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. - name (:class:`str`): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.DeleteTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_task, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def lease_tasks(self, - request: cloudtasks.LeaseTasksRequest = None, - *, - parent: str = None, - lease_duration: duration_pb2.Duration = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudtasks.LeaseTasksResponse: - r"""Leases tasks from a pull queue for - [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. - - This method is invoked by the worker to obtain a lease. The - worker must acknowledge the task via - [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] - after they have performed the work associated with the task. - - The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is - intended to store data that the worker needs to perform the work - associated with the task. To return the payloads in the - [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set - [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] - to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. - - A maximum of 10 qps of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - requests are allowed per queue. - [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is - returned when this limit is exceeded. - [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also - returned when - [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] - is exceeded. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.LeaseTasksRequest`): - The request object. Request message for leasing tasks - using - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. - parent (:class:`str`): - Required. The queue name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lease_duration (:class:`google.protobuf.duration_pb2.Duration`): - Required. The duration of the lease. - - Each task returned in the - [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] - will have its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - set to the current time plus the ``lease_duration``. The - task is leased until its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]; - thus, the task will not be returned to another - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - call before its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. - - After the worker has successfully finished the work - associated with the task, the worker must call via - [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] - before the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. - Otherwise the task will be returned to a later - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - call so that another worker can retry it. - - The maximum lease duration is 1 week. ``lease_duration`` - will be truncated to the nearest second. - - This corresponds to the ``lease_duration`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.LeaseTasksResponse: - Response message for leasing tasks using - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, lease_duration]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.LeaseTasksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if lease_duration is not None: - request.lease_duration = lease_duration - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.lease_tasks, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def acknowledge_task(self, - request: cloudtasks.AcknowledgeTaskRequest = None, - *, - name: str = None, - schedule_time: timestamp_pb2.Timestamp = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Acknowledges a pull task. - - The worker, that is, the entity that - [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this - task must call this method to indicate that the work associated - with the task has finished. - - The worker must acknowledge a task within the - [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] - or the lease will expire and the task will become available to - be leased again. 
After the task is acknowledged, it will not be - returned by a later - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], - [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.AcknowledgeTaskRequest`): - The request object. Request message for acknowledging a - task using - [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. - name (:class:`str`): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): - Required. The task's current schedule time, available in - the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - returned by - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - response or - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] - response. This restriction is to ensure that your worker - currently holds the lease. - - This corresponds to the ``schedule_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, schedule_time]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.AcknowledgeTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if schedule_time is not None: - request.schedule_time = schedule_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.acknowledge_task, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def renew_lease(self, - request: cloudtasks.RenewLeaseRequest = None, - *, - name: str = None, - schedule_time: timestamp_pb2.Timestamp = None, - lease_duration: duration_pb2.Duration = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Renew the current lease of a pull task. - - The worker can use this method to extend the lease by a new - duration, starting from now. The new task lease will be returned - in the task's - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.RenewLeaseRequest`): - The request object. Request message for renewing a lease - using - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. - name (:class:`str`): - Required. The task name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): - Required. The task's current schedule time, available in - the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - returned by - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - response or - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] - response. This restriction is to ensure that your worker - currently holds the lease. - - This corresponds to the ``schedule_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lease_duration (:class:`google.protobuf.duration_pb2.Duration`): - Required. The desired new lease duration, starting from - now. - - The maximum lease duration is 1 week. ``lease_duration`` - will be truncated to the nearest second. - - This corresponds to the ``lease_duration`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, schedule_time, lease_duration]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.RenewLeaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if schedule_time is not None: - request.schedule_time = schedule_time - if lease_duration is not None: - request.lease_duration = lease_duration - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.renew_lease, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def cancel_lease(self, - request: cloudtasks.CancelLeaseRequest = None, - *, - name: str = None, - schedule_time: timestamp_pb2.Timestamp = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Cancel a pull task's lease. - - The worker can use this method to cancel a task's lease by - setting its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - to now. This will make the task available to be leased to the - next caller of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.CancelLeaseRequest`): - The request object. 
Request message for canceling a - lease using - [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. - name (:class:`str`): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): - Required. The task's current schedule time, available in - the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - returned by - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - response or - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] - response. This restriction is to ensure that your worker - currently holds the lease. - - This corresponds to the ``schedule_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, schedule_time]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.CancelLeaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if schedule_time is not None: - request.schedule_time = schedule_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_lease, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def run_task(self, - request: cloudtasks.RunTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, - even if the task is already running, the queue has reached its - [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. For - example, - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be - used to retry a failed task after a fix has been made or to - manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is - returned contains the - [status][google.cloud.tasks.v2beta2.Task.status] after the task - is dispatched but before the task is received by its target. 
- - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - will be reset to the time that - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was - called plus the retry delay specified in the queue's - [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. - - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot - be called on a [pull - task][google.cloud.tasks.v2beta2.PullMessage]. - - Args: - request (:class:`google.cloud.tasks_v2beta2.types.RunTaskRequest`): - The request object. Request message for forcing a task - to run now using - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. - name (:class:`str`): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.RunTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_task, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-tasks", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "CloudTasksAsyncClient", -) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py deleted file mode 100644 index 169550c4..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py +++ /dev/null @@ -1,2388 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.tasks_v2beta2.services.cloud_tasks import pagers -from google.cloud.tasks_v2beta2.types import cloudtasks -from google.cloud.tasks_v2beta2.types import queue -from google.cloud.tasks_v2beta2.types import queue as gct_queue -from google.cloud.tasks_v2beta2.types import target -from google.cloud.tasks_v2beta2.types import task -from google.cloud.tasks_v2beta2.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import 
CloudTasksGrpcTransport -from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport - - -class CloudTasksClientMeta(type): - """Metaclass for the CloudTasks client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] - _transport_registry["grpc"] = CloudTasksGrpcTransport - _transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[CloudTasksTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class CloudTasksClient(metaclass=CloudTasksClientMeta): - """Cloud Tasks allows developers to manage the execution of - background work in their applications. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "cloudtasks.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CloudTasksTransport: - """Returns the transport used by the client instance. - - Returns: - CloudTasksTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def queue_path(project: str,location: str,queue: str,) -> str: - """Returns a fully-qualified queue string.""" - return "projects/{project}/locations/{location}/queues/{queue}".format(project=project, location=location, queue=queue, ) - - @staticmethod - def parse_queue_path(path: str) -> Dict[str,str]: - """Parses a queue path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def task_path(project: str,location: str,queue: str,task: str,) -> str: - """Returns a fully-qualified task string.""" - return "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(project=project, location=location, queue=queue, task=task, ) - - @staticmethod - def parse_task_path(path: str) -> Dict[str,str]: - """Parses a task path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)/tasks/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - 
"""Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudTasksTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud tasks client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, CloudTasksTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
- client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, CloudTasksTransport): - # transport is a CloudTasksTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - ) - - def list_queues(self, - request: cloudtasks.ListQueuesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListQueuesPager: - r"""Lists queues. - Queues are returned in lexicographical order. - - Args: - request (google.cloud.tasks_v2beta2.types.ListQueuesRequest): - The request object. Request message for - [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. - parent (str): - Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.services.cloud_tasks.pagers.ListQueuesPager: - Response message for - [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.ListQueuesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.ListQueuesRequest): - request = cloudtasks.ListQueuesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_queues] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListQueuesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_queue(self, - request: cloudtasks.GetQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Gets a queue. - - Args: - request (google.cloud.tasks_v2beta2.types.GetQueueRequest): - The request object. Request message for - [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. - name (str): - Required. The resource name of the queue. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, target types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.GetQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.GetQueueRequest): - request = cloudtasks.GetQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_queue(self, - request: cloudtasks.CreateQueueRequest = None, - *, - parent: str = None, - queue: gct_queue.Queue = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_queue.Queue: - r"""Creates a queue. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (google.cloud.tasks_v2beta2.types.CreateQueueRequest): - The request object. Request message for - [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. - parent (str): - Required. The location name in which the queue will be - created. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - The list of allowed locations can be obtained by calling - Cloud Tasks' implementation of - [ListLocations][google.cloud.location.Locations.ListLocations]. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - queue (google.cloud.tasks_v2beta2.types.Queue): - Required. The queue to create. - - [Queue's name][google.cloud.tasks.v2beta2.Queue.name] - cannot be the same as an existing queue. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, target types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, queue]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.CreateQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.CreateQueueRequest): - request = cloudtasks.CreateQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if queue is not None: - request.queue = queue - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def update_queue(self, - request: cloudtasks.UpdateQueueRequest = None, - *, - queue: gct_queue.Queue = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_queue.Queue: - r"""Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (google.cloud.tasks_v2beta2.types.UpdateQueueRequest): - The request object. Request message for - [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue]. - queue (google.cloud.tasks_v2beta2.types.Queue): - Required. The queue to create or update. - - The queue's - [name][google.cloud.tasks.v2beta2.Queue.name] must be - specified. - - Output only fields cannot be modified using UpdateQueue. - Any value specified for an output only field will be - ignored. The queue's - [name][google.cloud.tasks.v2beta2.Queue.name] cannot be - changed. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask used to specify which fields - of the queue are being updated. - If empty, then all fields will be - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, target types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([queue, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.UpdateQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.UpdateQueueRequest): - request = cloudtasks.UpdateQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if queue is not None: - request.queue = queue - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("queue.name", request.queue.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_queue(self, - request: cloudtasks.DeleteQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be - created for 7 days. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (google.cloud.tasks_v2beta2.types.DeleteQueueRequest): - The request object. Request message for - [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.DeleteQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, cloudtasks.DeleteQueueRequest): - request = cloudtasks.DeleteQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def purge_queue(self, - request: cloudtasks.PurgeQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Purges a queue by deleting all of its tasks. - All tasks created before this method is called are - permanently deleted. - Purge operations can take up to one minute to take - effect. Tasks might be dispatched before the purge takes - effect. A purge is irreversible. - - Args: - request (google.cloud.tasks_v2beta2.types.PurgeQueueRequest): - The request object. Request message for - [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.tasks_v2beta2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, target types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.PurgeQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.PurgeQueueRequest): - request = cloudtasks.PurgeQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.purge_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def pause_queue(self, - request: cloudtasks.PauseQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Pauses the queue. 
- - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. - Tasks can still be added when the queue is paused. A queue is - paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. - - Args: - request (google.cloud.tasks_v2beta2.types.PauseQueueRequest): - The request object. Request message for - [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue]. - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, target types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.PauseQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, cloudtasks.PauseQueueRequest): - request = cloudtasks.PauseQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.pause_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def resume_queue(self, - request: cloudtasks.ResumeQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The - state of a queue is stored in the queue's - [state][google.cloud.tasks.v2beta2.Queue.state]; after calling - this method it will be set to - [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can lead - to target overloading. If you are resuming high-QPS queues, - follow the 500/50/5 pattern described in `Managing Cloud Tasks - Scaling - Risks `__. - - Args: - request (google.cloud.tasks_v2beta2.types.ResumeQueueRequest): - The request object. Request message for - [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. - name (str): - Required. The queue name. 
For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, target types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.ResumeQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.ResumeQueueRequest): - request = cloudtasks.ResumeQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.resume_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: iam_policy_pb2.GetIamPolicyRequest = None, - *, - resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a - [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Args: - request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): - The request object. Request message for `GetIamPolicy` - method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). 
- A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. - - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. 
- request = iam_policy_pb2.GetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: iam_policy_pb2.SetIamPolicyRequest = None, - *, - resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy for a - [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM - permissions yet. Project-level permissions are required to use - the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Args: - request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): - The request object. Request message for `SetIamPolicy` - method. - resource (str): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). - A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. - - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, - *, - resource: str = None, - permissions: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that a caller has on a - [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does - not exist, this will return an empty set of permissions, not a - [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. 
- - Args: - request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): - The request object. Request message for - `TestIamPermissions` method. - resource (str): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (Sequence[str]): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. 
- request = iam_policy_pb2.TestIamPermissionsRequest() - if resource is not None: - request.resource = resource - if permissions: - request.permissions.extend(permissions) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_tasks(self, - request: cloudtasks.ListTasksRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTasksPager: - r"""Lists the tasks in a queue. - - By default, only the - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is - retrieved due to performance considerations; - [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] - controls the subset of information which is returned. - - The tasks may be returned in any order. The ordering may change - at any time. - - Args: - request (google.cloud.tasks_v2beta2.types.ListTasksRequest): - The request object. Request message for listing tasks - using - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.services.cloud_tasks.pagers.ListTasksPager: - Response message for listing tasks using - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.ListTasksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.ListTasksRequest): - request = cloudtasks.ListTasksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_tasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTasksPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_task(self, - request: cloudtasks.GetTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Gets a task. - - Args: - request (google.cloud.tasks_v2beta2.types.GetTaskRequest): - The request object. Request message for getting a task - using - [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.GetTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.GetTaskRequest): - request = cloudtasks.GetTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.get_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_task(self, - request: cloudtasks.CreateTaskRequest = None, - *, - parent: str = None, - task: gct_task.Task = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_task.Task: - r"""Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask - command. - - - For [App Engine - queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the - maximum task size is 100KB. - - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the - maximum task size is 1MB. - - Args: - request (google.cloud.tasks_v2beta2.types.CreateTaskRequest): - The request object. Request message for - [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - The queue must already exist. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task (google.cloud.tasks_v2beta2.types.Task): - Required. The task to add. - - Task names have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. - The user can optionally specify a task - [name][google.cloud.tasks.v2beta2.Task.name]. If a name - is not specified then the system will generate a random - unique task id, which will be set in the task returned - in the [response][google.cloud.tasks.v2beta2.Task.name]. 
- - If - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - is not set or is in the past then Cloud Tasks will set - it to the current time. - - Task De-duplication: - - Explicitly specifying a task ID enables task - de-duplication. If a task's ID is identical to that of - an existing task or a task that was deleted or completed - recently then the call will fail with - [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the - task's queue was created using Cloud Tasks, then another - task with the same name can't be created for ~1hour - after the original task was deleted or completed. If the - task's queue was created using queue.yaml or queue.xml, - then another task with the same name can't be created - for ~9days after the original task was deleted or - completed. - - Because there is an extra lookup cost to identify - duplicate task names, these - [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] - calls have significantly increased latency. Using hashed - strings for the task id or for the prefix of the task id - is recommended. Choosing task ids that are sequential or - have sequential prefixes, for example using a timestamp, - causes an increase in latency and error rates in all - task commands. The infrastructure relies on an - approximately uniform distribution of task ids to store - and serve tasks efficiently. - - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, task]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.CreateTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.CreateTaskRequest): - request = cloudtasks.CreateTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if task is not None: - request.task = task - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_task(self, - request: cloudtasks.DeleteTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a task. - A task can be deleted if it is scheduled or dispatched. - A task cannot be deleted if it has completed - successfully or permanently failed. - - Args: - request (google.cloud.tasks_v2beta2.types.DeleteTaskRequest): - The request object. Request message for deleting a task - using - [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. 
- name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.DeleteTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.DeleteTaskRequest): - request = cloudtasks.DeleteTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def lease_tasks(self, - request: cloudtasks.LeaseTasksRequest = None, - *, - parent: str = None, - lease_duration: duration_pb2.Duration = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudtasks.LeaseTasksResponse: - r"""Leases tasks from a pull queue for - [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. - - This method is invoked by the worker to obtain a lease. The - worker must acknowledge the task via - [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] - after they have performed the work associated with the task. - - The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is - intended to store data that the worker needs to perform the work - associated with the task. To return the payloads in the - [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set - [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] - to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. - - A maximum of 10 qps of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - requests are allowed per queue. - [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is - returned when this limit is exceeded. - [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also - returned when - [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] - is exceeded. - - Args: - request (google.cloud.tasks_v2beta2.types.LeaseTasksRequest): - The request object. Request message for leasing tasks - using - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. - parent (str): - Required. The queue name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lease_duration (google.protobuf.duration_pb2.Duration): - Required. The duration of the lease. - - Each task returned in the - [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] - will have its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - set to the current time plus the ``lease_duration``. The - task is leased until its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]; - thus, the task will not be returned to another - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - call before its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. - - After the worker has successfully finished the work - associated with the task, the worker must call via - [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] - before the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. - Otherwise the task will be returned to a later - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - call so that another worker can retry it. - - The maximum lease duration is 1 week. ``lease_duration`` - will be truncated to the nearest second. - - This corresponds to the ``lease_duration`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.LeaseTasksResponse: - Response message for leasing tasks using - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, lease_duration]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.LeaseTasksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.LeaseTasksRequest): - request = cloudtasks.LeaseTasksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if lease_duration is not None: - request.lease_duration = lease_duration - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.lease_tasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def acknowledge_task(self, - request: cloudtasks.AcknowledgeTaskRequest = None, - *, - name: str = None, - schedule_time: timestamp_pb2.Timestamp = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Acknowledges a pull task. - - The worker, that is, the entity that - [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this - task must call this method to indicate that the work associated - with the task has finished. 
- - The worker must acknowledge a task within the - [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] - or the lease will expire and the task will become available to - be leased again. After the task is acknowledged, it will not be - returned by a later - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], - [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - - Args: - request (google.cloud.tasks_v2beta2.types.AcknowledgeTaskRequest): - The request object. Request message for acknowledging a - task using - [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The task's current schedule time, available in - the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - returned by - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - response or - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] - response. This restriction is to ensure that your worker - currently holds the lease. - - This corresponds to the ``schedule_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, schedule_time]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.AcknowledgeTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.AcknowledgeTaskRequest): - request = cloudtasks.AcknowledgeTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if schedule_time is not None: - request.schedule_time = schedule_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.acknowledge_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def renew_lease(self, - request: cloudtasks.RenewLeaseRequest = None, - *, - name: str = None, - schedule_time: timestamp_pb2.Timestamp = None, - lease_duration: duration_pb2.Duration = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Renew the current lease of a pull task. - - The worker can use this method to extend the lease by a new - duration, starting from now. The new task lease will be returned - in the task's - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. - - Args: - request (google.cloud.tasks_v2beta2.types.RenewLeaseRequest): - The request object. 
Request message for renewing a lease - using - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The task's current schedule time, available in - the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - returned by - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - response or - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] - response. This restriction is to ensure that your worker - currently holds the lease. - - This corresponds to the ``schedule_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lease_duration (google.protobuf.duration_pb2.Duration): - Required. The desired new lease duration, starting from - now. - - The maximum lease duration is 1 week. ``lease_duration`` - will be truncated to the nearest second. - - This corresponds to the ``lease_duration`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, schedule_time, lease_duration]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.RenewLeaseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.RenewLeaseRequest): - request = cloudtasks.RenewLeaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if schedule_time is not None: - request.schedule_time = schedule_time - if lease_duration is not None: - request.lease_duration = lease_duration - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.renew_lease] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def cancel_lease(self, - request: cloudtasks.CancelLeaseRequest = None, - *, - name: str = None, - schedule_time: timestamp_pb2.Timestamp = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Cancel a pull task's lease. - - The worker can use this method to cancel a task's lease by - setting its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - to now. This will make the task available to be leased to the - next caller of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. 
- - Args: - request (google.cloud.tasks_v2beta2.types.CancelLeaseRequest): - The request object. Request message for canceling a - lease using - [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The task's current schedule time, available in - the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - returned by - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - response or - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] - response. This restriction is to ensure that your worker - currently holds the lease. - - This corresponds to the ``schedule_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, schedule_time]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.CancelLeaseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, cloudtasks.CancelLeaseRequest): - request = cloudtasks.CancelLeaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if schedule_time is not None: - request.schedule_time = schedule_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_lease] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def run_task(self, - request: cloudtasks.RunTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, - even if the task is already running, the queue has reached its - [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. For - example, - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be - used to retry a failed task after a fix has been made or to - manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is - returned contains the - [status][google.cloud.tasks.v2beta2.Task.status] after the task - is dispatched but before the task is received by its target. 
- - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - will be reset to the time that - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was - called plus the retry delay specified in the queue's - [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. - - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot - be called on a [pull - task][google.cloud.tasks.v2beta2.PullMessage]. - - Args: - request (google.cloud.tasks_v2beta2.types.RunTaskRequest): - The request object. Request message for forcing a task - to run now using - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta2.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.RunTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.RunTaskRequest): - request = cloudtasks.RunTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-tasks", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "CloudTasksClient", -) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py deleted file mode 100644 index e2e75405..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py +++ /dev/null @@ -1,264 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.cloud.tasks_v2beta2.types import cloudtasks -from google.cloud.tasks_v2beta2.types import queue -from google.cloud.tasks_v2beta2.types import task - - -class ListQueuesPager: - """A pager for iterating through ``list_queues`` requests. - - This class thinly wraps an initial - :class:`google.cloud.tasks_v2beta2.types.ListQueuesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``queues`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListQueues`` requests and continue to iterate - through the ``queues`` field on the - corresponding responses. - - All the usual :class:`google.cloud.tasks_v2beta2.types.ListQueuesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., cloudtasks.ListQueuesResponse], - request: cloudtasks.ListQueuesRequest, - response: cloudtasks.ListQueuesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.tasks_v2beta2.types.ListQueuesRequest): - The initial request object. - response (google.cloud.tasks_v2beta2.types.ListQueuesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudtasks.ListQueuesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[cloudtasks.ListQueuesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[queue.Queue]: - for page in self.pages: - yield from page.queues - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListQueuesAsyncPager: - """A pager for iterating through ``list_queues`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.tasks_v2beta2.types.ListQueuesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``queues`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListQueues`` requests and continue to iterate - through the ``queues`` field on the - corresponding responses. - - All the usual :class:`google.cloud.tasks_v2beta2.types.ListQueuesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cloudtasks.ListQueuesResponse]], - request: cloudtasks.ListQueuesRequest, - response: cloudtasks.ListQueuesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.tasks_v2beta2.types.ListQueuesRequest): - The initial request object. - response (google.cloud.tasks_v2beta2.types.ListQueuesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = cloudtasks.ListQueuesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[cloudtasks.ListQueuesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[queue.Queue]: - async def async_generator(): - async for page in self.pages: - for response in page.queues: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksPager: - """A pager for iterating through ``list_tasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.tasks_v2beta2.types.ListTasksResponse` object, and - provides an ``__iter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.tasks_v2beta2.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., cloudtasks.ListTasksResponse], - request: cloudtasks.ListTasksRequest, - response: cloudtasks.ListTasksResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.tasks_v2beta2.types.ListTasksRequest): - The initial request object. 
- response (google.cloud.tasks_v2beta2.types.ListTasksResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudtasks.ListTasksRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[cloudtasks.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[task.Task]: - for page in self.pages: - yield from page.tasks - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksAsyncPager: - """A pager for iterating through ``list_tasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.tasks_v2beta2.types.ListTasksResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.tasks_v2beta2.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cloudtasks.ListTasksResponse]], - request: cloudtasks.ListTasksRequest, - response: cloudtasks.ListTasksResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.tasks_v2beta2.types.ListTasksRequest): - The initial request object. - response (google.cloud.tasks_v2beta2.types.ListTasksResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudtasks.ListTasksRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[cloudtasks.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[task.Task]: - async def async_generator(): - async for page in self.pages: - for response in page.tasks: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py deleted file mode 100644 index 3db96829..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import CloudTasksTransport -from .grpc import CloudTasksGrpcTransport -from .grpc_asyncio import CloudTasksGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] -_transport_registry['grpc'] = CloudTasksGrpcTransport -_transport_registry['grpc_asyncio'] = CloudTasksGrpcAsyncIOTransport - -__all__ = ( - 'CloudTasksTransport', - 'CloudTasksGrpcTransport', - 'CloudTasksGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py deleted file mode 100644 index 1e023912..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py +++ /dev/null @@ -1,497 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.tasks_v2beta2.types import cloudtasks -from google.cloud.tasks_v2beta2.types import queue -from google.cloud.tasks_v2beta2.types import queue as gct_queue -from google.cloud.tasks_v2beta2.types import task -from google.cloud.tasks_v2beta2.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-tasks', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class CloudTasksTransport(abc.ABC): - """Abstract transport class for CloudTasks.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'cloudtasks.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - 
quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials is service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.list_queues: gapic_v1.method.wrap_method( - self.list_queues, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.get_queue: gapic_v1.method.wrap_method( - self.get_queue, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.create_queue: gapic_v1.method.wrap_method( - self.create_queue, - default_timeout=20.0, - client_info=client_info, - ), - self.update_queue: gapic_v1.method.wrap_method( - self.update_queue, - default_timeout=20.0, - client_info=client_info, - ), - self.delete_queue: gapic_v1.method.wrap_method( - self.delete_queue, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.purge_queue: gapic_v1.method.wrap_method( - self.purge_queue, - default_timeout=20.0, - client_info=client_info, - ), - self.pause_queue: gapic_v1.method.wrap_method( - self.pause_queue, - default_timeout=20.0, - client_info=client_info, - ), - self.resume_queue: gapic_v1.method.wrap_method( - self.resume_queue, - default_timeout=20.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - 
client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=20.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.list_tasks: gapic_v1.method.wrap_method( - self.list_tasks, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.get_task: gapic_v1.method.wrap_method( - self.get_task, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.create_task: gapic_v1.method.wrap_method( - self.create_task, - default_timeout=20.0, - client_info=client_info, - ), - self.delete_task: gapic_v1.method.wrap_method( - self.delete_task, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.lease_tasks: gapic_v1.method.wrap_method( - self.lease_tasks, - default_timeout=20.0, - client_info=client_info, - ), - self.acknowledge_task: gapic_v1.method.wrap_method( - self.acknowledge_task, - default_timeout=20.0, - client_info=client_info, - ), - self.renew_lease: gapic_v1.method.wrap_method( - self.renew_lease, - default_timeout=20.0, - 
client_info=client_info, - ), - self.cancel_lease: gapic_v1.method.wrap_method( - self.cancel_lease, - default_timeout=20.0, - client_info=client_info, - ), - self.run_task: gapic_v1.method.wrap_method( - self.run_task, - default_timeout=20.0, - client_info=client_info, - ), - } - - @property - def list_queues(self) -> Callable[ - [cloudtasks.ListQueuesRequest], - Union[ - cloudtasks.ListQueuesResponse, - Awaitable[cloudtasks.ListQueuesResponse] - ]]: - raise NotImplementedError() - - @property - def get_queue(self) -> Callable[ - [cloudtasks.GetQueueRequest], - Union[ - queue.Queue, - Awaitable[queue.Queue] - ]]: - raise NotImplementedError() - - @property - def create_queue(self) -> Callable[ - [cloudtasks.CreateQueueRequest], - Union[ - gct_queue.Queue, - Awaitable[gct_queue.Queue] - ]]: - raise NotImplementedError() - - @property - def update_queue(self) -> Callable[ - [cloudtasks.UpdateQueueRequest], - Union[ - gct_queue.Queue, - Awaitable[gct_queue.Queue] - ]]: - raise NotImplementedError() - - @property - def delete_queue(self) -> Callable[ - [cloudtasks.DeleteQueueRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def purge_queue(self) -> Callable[ - [cloudtasks.PurgeQueueRequest], - Union[ - queue.Queue, - Awaitable[queue.Queue] - ]]: - raise NotImplementedError() - - @property - def pause_queue(self) -> Callable[ - [cloudtasks.PauseQueueRequest], - Union[ - queue.Queue, - Awaitable[queue.Queue] - ]]: - raise NotImplementedError() - - @property - def resume_queue(self) -> Callable[ - [cloudtasks.ResumeQueueRequest], - Union[ - queue.Queue, - Awaitable[queue.Queue] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - 
policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_tasks(self) -> Callable[ - [cloudtasks.ListTasksRequest], - Union[ - cloudtasks.ListTasksResponse, - Awaitable[cloudtasks.ListTasksResponse] - ]]: - raise NotImplementedError() - - @property - def get_task(self) -> Callable[ - [cloudtasks.GetTaskRequest], - Union[ - task.Task, - Awaitable[task.Task] - ]]: - raise NotImplementedError() - - @property - def create_task(self) -> Callable[ - [cloudtasks.CreateTaskRequest], - Union[ - gct_task.Task, - Awaitable[gct_task.Task] - ]]: - raise NotImplementedError() - - @property - def delete_task(self) -> Callable[ - [cloudtasks.DeleteTaskRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def lease_tasks(self) -> Callable[ - [cloudtasks.LeaseTasksRequest], - Union[ - cloudtasks.LeaseTasksResponse, - Awaitable[cloudtasks.LeaseTasksResponse] - ]]: - raise NotImplementedError() - - @property - def acknowledge_task(self) -> Callable[ - [cloudtasks.AcknowledgeTaskRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def renew_lease(self) -> Callable[ - [cloudtasks.RenewLeaseRequest], - Union[ - task.Task, - Awaitable[task.Task] - ]]: - raise NotImplementedError() - - @property - def cancel_lease(self) -> Callable[ - [cloudtasks.CancelLeaseRequest], - Union[ - task.Task, - Awaitable[task.Task] - ]]: - raise NotImplementedError() - - @property - def run_task(self) -> Callable[ - [cloudtasks.RunTaskRequest], - Union[ - task.Task, - Awaitable[task.Task] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'CloudTasksTransport', -) diff --git 
a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py deleted file mode 100644 index 252400e8..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py +++ /dev/null @@ -1,942 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.tasks_v2beta2.types import cloudtasks -from google.cloud.tasks_v2beta2.types import queue -from google.cloud.tasks_v2beta2.types import queue as gct_queue -from google.cloud.tasks_v2beta2.types import task -from google.cloud.tasks_v2beta2.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO - - -class CloudTasksGrpcTransport(CloudTasksTransport): - """gRPC backend transport for CloudTasks. 
- - Cloud Tasks allows developers to manage the execution of - background work in their applications. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'cloudtasks.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. 
- self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'cloudtasks.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def list_queues(self) -> Callable[ - [cloudtasks.ListQueuesRequest], - cloudtasks.ListQueuesResponse]: - r"""Return a callable for the list queues method over gRPC. - - Lists queues. - Queues are returned in lexicographical order. - - Returns: - Callable[[~.ListQueuesRequest], - ~.ListQueuesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_queues' not in self._stubs: - self._stubs['list_queues'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/ListQueues', - request_serializer=cloudtasks.ListQueuesRequest.serialize, - response_deserializer=cloudtasks.ListQueuesResponse.deserialize, - ) - return self._stubs['list_queues'] - - @property - def get_queue(self) -> Callable[ - [cloudtasks.GetQueueRequest], - queue.Queue]: - r"""Return a callable for the get queue method over gRPC. - - Gets a queue. - - Returns: - Callable[[~.GetQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_queue' not in self._stubs: - self._stubs['get_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/GetQueue', - request_serializer=cloudtasks.GetQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['get_queue'] - - @property - def create_queue(self) -> Callable[ - [cloudtasks.CreateQueueRequest], - gct_queue.Queue]: - r"""Return a callable for the create queue method over gRPC. - - Creates a queue. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.CreateQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_queue' not in self._stubs: - self._stubs['create_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue', - request_serializer=cloudtasks.CreateQueueRequest.serialize, - response_deserializer=gct_queue.Queue.deserialize, - ) - return self._stubs['create_queue'] - - @property - def update_queue(self) -> Callable[ - [cloudtasks.UpdateQueueRequest], - gct_queue.Queue]: - r"""Return a callable for the update queue method over gRPC. - - Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.UpdateQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_queue' not in self._stubs: - self._stubs['update_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue', - request_serializer=cloudtasks.UpdateQueueRequest.serialize, - response_deserializer=gct_queue.Queue.deserialize, - ) - return self._stubs['update_queue'] - - @property - def delete_queue(self) -> Callable[ - [cloudtasks.DeleteQueueRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete queue method over gRPC. - - Deletes a queue. 
- - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be - created for 7 days. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.DeleteQueueRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_queue' not in self._stubs: - self._stubs['delete_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue', - request_serializer=cloudtasks.DeleteQueueRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_queue'] - - @property - def purge_queue(self) -> Callable[ - [cloudtasks.PurgeQueueRequest], - queue.Queue]: - r"""Return a callable for the purge queue method over gRPC. - - Purges a queue by deleting all of its tasks. - All tasks created before this method is called are - permanently deleted. - Purge operations can take up to one minute to take - effect. Tasks might be dispatched before the purge takes - effect. A purge is irreversible. - - Returns: - Callable[[~.PurgeQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'purge_queue' not in self._stubs: - self._stubs['purge_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue', - request_serializer=cloudtasks.PurgeQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['purge_queue'] - - @property - def pause_queue(self) -> Callable[ - [cloudtasks.PauseQueueRequest], - queue.Queue]: - r"""Return a callable for the pause queue method over gRPC. - - Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. - Tasks can still be added when the queue is paused. A queue is - paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. - - Returns: - Callable[[~.PauseQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'pause_queue' not in self._stubs: - self._stubs['pause_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue', - request_serializer=cloudtasks.PauseQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['pause_queue'] - - @property - def resume_queue(self) -> Callable[ - [cloudtasks.ResumeQueueRequest], - queue.Queue]: - r"""Return a callable for the resume queue method over gRPC. - - Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. 
The - state of a queue is stored in the queue's - [state][google.cloud.tasks.v2beta2.Queue.state]; after calling - this method it will be set to - [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can lead - to target overloading. If you are resuming high-QPS queues, - follow the 500/50/5 pattern described in `Managing Cloud Tasks - Scaling - Risks `__. - - Returns: - Callable[[~.ResumeQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'resume_queue' not in self._stubs: - self._stubs['resume_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue', - request_serializer=cloudtasks.ResumeQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['resume_queue'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a - [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy for a - [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM - permissions yet. Project-level permissions are required to use - the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that a caller has on a - [Queue][google.cloud.tasks.v2beta2.Queue]. 
If the resource does - not exist, this will return an empty set of permissions, not a - [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def list_tasks(self) -> Callable[ - [cloudtasks.ListTasksRequest], - cloudtasks.ListTasksResponse]: - r"""Return a callable for the list tasks method over gRPC. - - Lists the tasks in a queue. - - By default, only the - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is - retrieved due to performance considerations; - [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] - controls the subset of information which is returned. - - The tasks may be returned in any order. The ordering may change - at any time. - - Returns: - Callable[[~.ListTasksRequest], - ~.ListTasksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/ListTasks', - request_serializer=cloudtasks.ListTasksRequest.serialize, - response_deserializer=cloudtasks.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - @property - def get_task(self) -> Callable[ - [cloudtasks.GetTaskRequest], - task.Task]: - r"""Return a callable for the get task method over gRPC. - - Gets a task. - - Returns: - Callable[[~.GetTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_task' not in self._stubs: - self._stubs['get_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/GetTask', - request_serializer=cloudtasks.GetTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def create_task(self) -> Callable[ - [cloudtasks.CreateTaskRequest], - gct_task.Task]: - r"""Return a callable for the create task method over gRPC. - - Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask - command. - - - For [App Engine - queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the - maximum task size is 100KB. - - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the - maximum task size is 1MB. - - Returns: - Callable[[~.CreateTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_task' not in self._stubs: - self._stubs['create_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/CreateTask', - request_serializer=cloudtasks.CreateTaskRequest.serialize, - response_deserializer=gct_task.Task.deserialize, - ) - return self._stubs['create_task'] - - @property - def delete_task(self) -> Callable[ - [cloudtasks.DeleteTaskRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete task method over gRPC. - - Deletes a task. - A task can be deleted if it is scheduled or dispatched. - A task cannot be deleted if it has completed - successfully or permanently failed. - - Returns: - Callable[[~.DeleteTaskRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_task' not in self._stubs: - self._stubs['delete_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask', - request_serializer=cloudtasks.DeleteTaskRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_task'] - - @property - def lease_tasks(self) -> Callable[ - [cloudtasks.LeaseTasksRequest], - cloudtasks.LeaseTasksResponse]: - r"""Return a callable for the lease tasks method over gRPC. - - Leases tasks from a pull queue for - [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. - - This method is invoked by the worker to obtain a lease. The - worker must acknowledge the task via - [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] - after they have performed the work associated with the task. - - The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is - intended to store data that the worker needs to perform the work - associated with the task. 
To return the payloads in the - [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set - [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] - to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. - - A maximum of 10 qps of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - requests are allowed per queue. - [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is - returned when this limit is exceeded. - [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also - returned when - [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] - is exceeded. - - Returns: - Callable[[~.LeaseTasksRequest], - ~.LeaseTasksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'lease_tasks' not in self._stubs: - self._stubs['lease_tasks'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks', - request_serializer=cloudtasks.LeaseTasksRequest.serialize, - response_deserializer=cloudtasks.LeaseTasksResponse.deserialize, - ) - return self._stubs['lease_tasks'] - - @property - def acknowledge_task(self) -> Callable[ - [cloudtasks.AcknowledgeTaskRequest], - empty_pb2.Empty]: - r"""Return a callable for the acknowledge task method over gRPC. - - Acknowledges a pull task. - - The worker, that is, the entity that - [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this - task must call this method to indicate that the work associated - with the task has finished. - - The worker must acknowledge a task within the - [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] - or the lease will expire and the task will become available to - be leased again. 
After the task is acknowledged, it will not be - returned by a later - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], - [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - - Returns: - Callable[[~.AcknowledgeTaskRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'acknowledge_task' not in self._stubs: - self._stubs['acknowledge_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask', - request_serializer=cloudtasks.AcknowledgeTaskRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['acknowledge_task'] - - @property - def renew_lease(self) -> Callable[ - [cloudtasks.RenewLeaseRequest], - task.Task]: - r"""Return a callable for the renew lease method over gRPC. - - Renew the current lease of a pull task. - - The worker can use this method to extend the lease by a new - duration, starting from now. The new task lease will be returned - in the task's - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. - - Returns: - Callable[[~.RenewLeaseRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'renew_lease' not in self._stubs: - self._stubs['renew_lease'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/RenewLease', - request_serializer=cloudtasks.RenewLeaseRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['renew_lease'] - - @property - def cancel_lease(self) -> Callable[ - [cloudtasks.CancelLeaseRequest], - task.Task]: - r"""Return a callable for the cancel lease method over gRPC. - - Cancel a pull task's lease. - - The worker can use this method to cancel a task's lease by - setting its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - to now. This will make the task available to be leased to the - next caller of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. - - Returns: - Callable[[~.CancelLeaseRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_lease' not in self._stubs: - self._stubs['cancel_lease'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/CancelLease', - request_serializer=cloudtasks.CancelLeaseRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['cancel_lease'] - - @property - def run_task(self) -> Callable[ - [cloudtasks.RunTaskRequest], - task.Task]: - r"""Return a callable for the run task method over gRPC. - - Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, - even if the task is already running, the queue has reached its - [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. 
For - example, - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be - used to retry a failed task after a fix has been made or to - manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is - returned contains the - [status][google.cloud.tasks.v2beta2.Task.status] after the task - is dispatched but before the task is received by its target. - - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - will be reset to the time that - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was - called plus the retry delay specified in the queue's - [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. - - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot - be called on a [pull - task][google.cloud.tasks.v2beta2.PullMessage]. - - Returns: - Callable[[~.RunTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'run_task' not in self._stubs: - self._stubs['run_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/RunTask', - request_serializer=cloudtasks.RunTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['run_task'] - - -__all__ = ( - 'CloudTasksGrpcTransport', -) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py deleted file mode 100644 index b427ac44..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py +++ /dev/null @@ -1,946 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.tasks_v2beta2.types import cloudtasks -from google.cloud.tasks_v2beta2.types import queue -from google.cloud.tasks_v2beta2.types import queue as gct_queue -from google.cloud.tasks_v2beta2.types import task -from google.cloud.tasks_v2beta2.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO -from .grpc import CloudTasksGrpcTransport - - -class CloudTasksGrpcAsyncIOTransport(CloudTasksTransport): - """gRPC AsyncIO backend transport for CloudTasks. - - Cloud Tasks allows developers to manage the execution of - background work in their applications. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'cloudtasks.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. 
- Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'cloudtasks.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), 
- ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def list_queues(self) -> Callable[ - [cloudtasks.ListQueuesRequest], - Awaitable[cloudtasks.ListQueuesResponse]]: - r"""Return a callable for the list queues method over gRPC. - - Lists queues. - Queues are returned in lexicographical order. - - Returns: - Callable[[~.ListQueuesRequest], - Awaitable[~.ListQueuesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_queues' not in self._stubs: - self._stubs['list_queues'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/ListQueues', - request_serializer=cloudtasks.ListQueuesRequest.serialize, - response_deserializer=cloudtasks.ListQueuesResponse.deserialize, - ) - return self._stubs['list_queues'] - - @property - def get_queue(self) -> Callable[ - [cloudtasks.GetQueueRequest], - Awaitable[queue.Queue]]: - r"""Return a callable for the get queue method over gRPC. - - Gets a queue. - - Returns: - Callable[[~.GetQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_queue' not in self._stubs: - self._stubs['get_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/GetQueue', - request_serializer=cloudtasks.GetQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['get_queue'] - - @property - def create_queue(self) -> Callable[ - [cloudtasks.CreateQueueRequest], - Awaitable[gct_queue.Queue]]: - r"""Return a callable for the create queue method over gRPC. - - Creates a queue. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.CreateQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_queue' not in self._stubs: - self._stubs['create_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue', - request_serializer=cloudtasks.CreateQueueRequest.serialize, - response_deserializer=gct_queue.Queue.deserialize, - ) - return self._stubs['create_queue'] - - @property - def update_queue(self) -> Callable[ - [cloudtasks.UpdateQueueRequest], - Awaitable[gct_queue.Queue]]: - r"""Return a callable for the update queue method over gRPC. - - Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a - maximum of 31 days. 
After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.UpdateQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_queue' not in self._stubs: - self._stubs['update_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue', - request_serializer=cloudtasks.UpdateQueueRequest.serialize, - response_deserializer=gct_queue.Queue.deserialize, - ) - return self._stubs['update_queue'] - - @property - def delete_queue(self) -> Callable[ - [cloudtasks.DeleteQueueRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete queue method over gRPC. - - Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be - created for 7 days. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.DeleteQueueRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_queue' not in self._stubs: - self._stubs['delete_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue', - request_serializer=cloudtasks.DeleteQueueRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_queue'] - - @property - def purge_queue(self) -> Callable[ - [cloudtasks.PurgeQueueRequest], - Awaitable[queue.Queue]]: - r"""Return a callable for the purge queue method over gRPC. - - Purges a queue by deleting all of its tasks. - All tasks created before this method is called are - permanently deleted. - Purge operations can take up to one minute to take - effect. Tasks might be dispatched before the purge takes - effect. A purge is irreversible. - - Returns: - Callable[[~.PurgeQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'purge_queue' not in self._stubs: - self._stubs['purge_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue', - request_serializer=cloudtasks.PurgeQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['purge_queue'] - - @property - def pause_queue(self) -> Callable[ - [cloudtasks.PauseQueueRequest], - Awaitable[queue.Queue]]: - r"""Return a callable for the pause queue method over gRPC. - - Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. - Tasks can still be added when the queue is paused. A queue is - paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. 
- - Returns: - Callable[[~.PauseQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'pause_queue' not in self._stubs: - self._stubs['pause_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue', - request_serializer=cloudtasks.PauseQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['pause_queue'] - - @property - def resume_queue(self) -> Callable[ - [cloudtasks.ResumeQueueRequest], - Awaitable[queue.Queue]]: - r"""Return a callable for the resume queue method over gRPC. - - Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The - state of a queue is stored in the queue's - [state][google.cloud.tasks.v2beta2.Queue.state]; after calling - this method it will be set to - [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can lead - to target overloading. If you are resuming high-QPS queues, - follow the 500/50/5 pattern described in `Managing Cloud Tasks - Scaling - Risks `__. - - Returns: - Callable[[~.ResumeQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'resume_queue' not in self._stubs: - self._stubs['resume_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue', - request_serializer=cloudtasks.ResumeQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['resume_queue'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a - [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy for a - [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM - permissions yet. Project-level permissions are required to use - the Cloud Console. 
- - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that a caller has on a - [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does - not exist, this will return an empty set of permissions, not a - [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def list_tasks(self) -> Callable[ - [cloudtasks.ListTasksRequest], - Awaitable[cloudtasks.ListTasksResponse]]: - r"""Return a callable for the list tasks method over gRPC. - - Lists the tasks in a queue. - - By default, only the - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is - retrieved due to performance considerations; - [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] - controls the subset of information which is returned. - - The tasks may be returned in any order. The ordering may change - at any time. - - Returns: - Callable[[~.ListTasksRequest], - Awaitable[~.ListTasksResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/ListTasks', - request_serializer=cloudtasks.ListTasksRequest.serialize, - response_deserializer=cloudtasks.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - @property - def get_task(self) -> Callable[ - [cloudtasks.GetTaskRequest], - Awaitable[task.Task]]: - r"""Return a callable for the get task method over gRPC. - - Gets a task. - - Returns: - Callable[[~.GetTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_task' not in self._stubs: - self._stubs['get_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/GetTask', - request_serializer=cloudtasks.GetTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def create_task(self) -> Callable[ - [cloudtasks.CreateTaskRequest], - Awaitable[gct_task.Task]]: - r"""Return a callable for the create task method over gRPC. - - Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask - command. - - - For [App Engine - queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the - maximum task size is 100KB. - - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the - maximum task size is 1MB. - - Returns: - Callable[[~.CreateTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_task' not in self._stubs: - self._stubs['create_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/CreateTask', - request_serializer=cloudtasks.CreateTaskRequest.serialize, - response_deserializer=gct_task.Task.deserialize, - ) - return self._stubs['create_task'] - - @property - def delete_task(self) -> Callable[ - [cloudtasks.DeleteTaskRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete task method over gRPC. - - Deletes a task. - A task can be deleted if it is scheduled or dispatched. - A task cannot be deleted if it has completed - successfully or permanently failed. 
- - Returns: - Callable[[~.DeleteTaskRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_task' not in self._stubs: - self._stubs['delete_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask', - request_serializer=cloudtasks.DeleteTaskRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_task'] - - @property - def lease_tasks(self) -> Callable[ - [cloudtasks.LeaseTasksRequest], - Awaitable[cloudtasks.LeaseTasksResponse]]: - r"""Return a callable for the lease tasks method over gRPC. - - Leases tasks from a pull queue for - [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. - - This method is invoked by the worker to obtain a lease. The - worker must acknowledge the task via - [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] - after they have performed the work associated with the task. - - The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is - intended to store data that the worker needs to perform the work - associated with the task. To return the payloads in the - [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set - [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] - to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. - - A maximum of 10 qps of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - requests are allowed per queue. - [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is - returned when this limit is exceeded. 
- [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also - returned when - [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] - is exceeded. - - Returns: - Callable[[~.LeaseTasksRequest], - Awaitable[~.LeaseTasksResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'lease_tasks' not in self._stubs: - self._stubs['lease_tasks'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks', - request_serializer=cloudtasks.LeaseTasksRequest.serialize, - response_deserializer=cloudtasks.LeaseTasksResponse.deserialize, - ) - return self._stubs['lease_tasks'] - - @property - def acknowledge_task(self) -> Callable[ - [cloudtasks.AcknowledgeTaskRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the acknowledge task method over gRPC. - - Acknowledges a pull task. - - The worker, that is, the entity that - [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this - task must call this method to indicate that the work associated - with the task has finished. - - The worker must acknowledge a task within the - [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] - or the lease will expire and the task will become available to - be leased again. After the task is acknowledged, it will not be - returned by a later - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], - [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - - Returns: - Callable[[~.AcknowledgeTaskRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'acknowledge_task' not in self._stubs: - self._stubs['acknowledge_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask', - request_serializer=cloudtasks.AcknowledgeTaskRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['acknowledge_task'] - - @property - def renew_lease(self) -> Callable[ - [cloudtasks.RenewLeaseRequest], - Awaitable[task.Task]]: - r"""Return a callable for the renew lease method over gRPC. - - Renew the current lease of a pull task. - - The worker can use this method to extend the lease by a new - duration, starting from now. The new task lease will be returned - in the task's - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. - - Returns: - Callable[[~.RenewLeaseRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'renew_lease' not in self._stubs: - self._stubs['renew_lease'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/RenewLease', - request_serializer=cloudtasks.RenewLeaseRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['renew_lease'] - - @property - def cancel_lease(self) -> Callable[ - [cloudtasks.CancelLeaseRequest], - Awaitable[task.Task]]: - r"""Return a callable for the cancel lease method over gRPC. - - Cancel a pull task's lease. - - The worker can use this method to cancel a task's lease by - setting its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - to now. 
This will make the task available to be leased to the - next caller of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. - - Returns: - Callable[[~.CancelLeaseRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_lease' not in self._stubs: - self._stubs['cancel_lease'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/CancelLease', - request_serializer=cloudtasks.CancelLeaseRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['cancel_lease'] - - @property - def run_task(self) -> Callable[ - [cloudtasks.RunTaskRequest], - Awaitable[task.Task]]: - r"""Return a callable for the run task method over gRPC. - - Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, - even if the task is already running, the queue has reached its - [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. For - example, - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be - used to retry a failed task after a fix has been made or to - manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is - returned contains the - [status][google.cloud.tasks.v2beta2.Task.status] after the task - is dispatched but before the task is received by its target. 
- - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - will be reset to the time that - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was - called plus the retry delay specified in the queue's - [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. - - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot - be called on a [pull - task][google.cloud.tasks.v2beta2.PullMessage]. - - Returns: - Callable[[~.RunTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_task' not in self._stubs: - self._stubs['run_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta2.CloudTasks/RunTask', - request_serializer=cloudtasks.RunTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['run_task'] - - -__all__ = ( - 'CloudTasksGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/__init__.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/__init__.py deleted file mode 100644 index 06717c3d..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/__init__.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .cloudtasks import ( - AcknowledgeTaskRequest, - CancelLeaseRequest, - CreateQueueRequest, - CreateTaskRequest, - DeleteQueueRequest, - DeleteTaskRequest, - GetQueueRequest, - GetTaskRequest, - LeaseTasksRequest, - LeaseTasksResponse, - ListQueuesRequest, - ListQueuesResponse, - ListTasksRequest, - ListTasksResponse, - PauseQueueRequest, - PurgeQueueRequest, - RenewLeaseRequest, - ResumeQueueRequest, - RunTaskRequest, - UpdateQueueRequest, -) -from .queue import ( - Queue, - QueueStats, - RateLimits, - RetryConfig, -) -from .target import ( - AppEngineHttpRequest, - AppEngineHttpTarget, - AppEngineRouting, - PullMessage, - PullTarget, - HttpMethod, -) -from .task import ( - AttemptStatus, - Task, - TaskStatus, -) - -__all__ = ( - 'AcknowledgeTaskRequest', - 'CancelLeaseRequest', - 'CreateQueueRequest', - 'CreateTaskRequest', - 'DeleteQueueRequest', - 'DeleteTaskRequest', - 'GetQueueRequest', - 'GetTaskRequest', - 'LeaseTasksRequest', - 'LeaseTasksResponse', - 'ListQueuesRequest', - 'ListQueuesResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'PauseQueueRequest', - 'PurgeQueueRequest', - 'RenewLeaseRequest', - 'ResumeQueueRequest', - 'RunTaskRequest', - 'UpdateQueueRequest', - 'Queue', - 'QueueStats', - 'RateLimits', - 'RetryConfig', - 'AppEngineHttpRequest', - 'AppEngineHttpTarget', - 'AppEngineRouting', - 'PullMessage', - 'PullTarget', - 'HttpMethod', - 'AttemptStatus', - 'Task', - 'TaskStatus', -) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/cloudtasks.py 
b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/cloudtasks.py deleted file mode 100644 index 7090aa05..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/cloudtasks.py +++ /dev/null @@ -1,869 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.tasks_v2beta2.types import queue as gct_queue -from google.cloud.tasks_v2beta2.types import task as gct_task -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.tasks.v2beta2', - manifest={ - 'ListQueuesRequest', - 'ListQueuesResponse', - 'GetQueueRequest', - 'CreateQueueRequest', - 'UpdateQueueRequest', - 'DeleteQueueRequest', - 'PurgeQueueRequest', - 'PauseQueueRequest', - 'ResumeQueueRequest', - 'ListTasksRequest', - 'ListTasksResponse', - 'GetTaskRequest', - 'CreateTaskRequest', - 'DeleteTaskRequest', - 'LeaseTasksRequest', - 'LeaseTasksResponse', - 'AcknowledgeTaskRequest', - 'RenewLeaseRequest', - 'CancelLeaseRequest', - 'RunTaskRequest', - }, -) - - -class ListQueuesRequest(proto.Message): - r"""Request message for - [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. - - Attributes: - parent (str): - Required. The location name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - filter (str): - ``filter`` can be used to specify a subset of queues. Any - [Queue][google.cloud.tasks.v2beta2.Queue] field can be used - as a filter and several operators as supported. For example: - ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as - described in `Stackdriver's Advanced Logs - Filters `__. - - Sample filter "app_engine_http_target: \*". - - Note that using filters might cause fewer queues than the - requested_page size to be returned. - page_size (int): - Requested page size. - - The maximum page size is 9800. If unspecified, the page size - will be the maximum. Fewer queues than requested might be - returned, even if more queues exist; use the - [next_page_token][google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token] - in the response to determine if more queues exist. - page_token (str): - A token identifying the page of results to return. - - To request the first page results, page_token must be empty. - To request the next page of results, page_token must be the - value of - [next_page_token][google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token] - returned from the previous call to - [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues] - method. It is an error to switch the value of the - [filter][google.cloud.tasks.v2beta2.ListQueuesRequest.filter] - while iterating through pages. - read_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Read mask is used for a more granular control over - what the API returns. If the mask is not present all fields - will be returned except [Queue.stats]. [Queue.stats] will be - returned only if it was explicitly specified in the mask. 
- """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask_pb2.FieldMask, - ) - - -class ListQueuesResponse(proto.Message): - r"""Response message for - [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. - - Attributes: - queues (Sequence[google.cloud.tasks_v2beta2.types.Queue]): - The list of queues. - next_page_token (str): - A token to retrieve next page of results. - - To return the next page of results, call - [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues] - with this value as the - [page_token][google.cloud.tasks.v2beta2.ListQueuesRequest.page_token]. - - If the next_page_token is empty, there are no more results. - - The page token is valid for only 2 hours. - """ - - @property - def raw_page(self): - return self - - queues = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gct_queue.Queue, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetQueueRequest(proto.Message): - r"""Request message for - [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. - - Attributes: - name (str): - Required. The resource name of the queue. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - read_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Read mask is used for a more granular control over - what the API returns. If the mask is not present all fields - will be returned except [Queue.stats]. [Queue.stats] will be - returned only if it was explicitly specified in the mask. 
- """ - - name = proto.Field( - proto.STRING, - number=1, - ) - read_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class CreateQueueRequest(proto.Message): - r"""Request message for - [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. - - Attributes: - parent (str): - Required. The location name in which the queue will be - created. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - The list of allowed locations can be obtained by calling - Cloud Tasks' implementation of - [ListLocations][google.cloud.location.Locations.ListLocations]. - queue (google.cloud.tasks_v2beta2.types.Queue): - Required. The queue to create. - - [Queue's name][google.cloud.tasks.v2beta2.Queue.name] cannot - be the same as an existing queue. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - queue = proto.Field( - proto.MESSAGE, - number=2, - message=gct_queue.Queue, - ) - - -class UpdateQueueRequest(proto.Message): - r"""Request message for - [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue]. - - Attributes: - queue (google.cloud.tasks_v2beta2.types.Queue): - Required. The queue to create or update. - - The queue's [name][google.cloud.tasks.v2beta2.Queue.name] - must be specified. - - Output only fields cannot be modified using UpdateQueue. Any - value specified for an output only field will be ignored. - The queue's [name][google.cloud.tasks.v2beta2.Queue.name] - cannot be changed. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask used to specify which fields of the - queue are being updated. - If empty, then all fields will be updated. - """ - - queue = proto.Field( - proto.MESSAGE, - number=1, - message=gct_queue.Queue, - ) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteQueueRequest(proto.Message): - r"""Request message for - [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. 
- - Attributes: - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class PurgeQueueRequest(proto.Message): - r"""Request message for - [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. - - Attributes: - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class PauseQueueRequest(proto.Message): - r"""Request message for - [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue]. - - Attributes: - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ResumeQueueRequest(proto.Message): - r"""Request message for - [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. - - Attributes: - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListTasksRequest(proto.Message): - r"""Request message for listing tasks using - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - - Attributes: - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - response_view (google.cloud.tasks_v2beta2.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. - - By default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. 
- - Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. - page_size (int): - Maximum page size. - - Fewer tasks than requested might be returned, even if more - tasks exist; use - [next_page_token][google.cloud.tasks.v2beta2.ListTasksResponse.next_page_token] - in the response to determine if more tasks exist. - - The maximum page size is 1000. If unspecified, the page size - will be the maximum. - page_token (str): - A token identifying the page of results to return. - - To request the first page results, page_token must be empty. - To request the next page of results, page_token must be the - value of - [next_page_token][google.cloud.tasks.v2beta2.ListTasksResponse.next_page_token] - returned from the previous call to - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] - method. - - The page token is valid for only 2 hours. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - response_view = proto.Field( - proto.ENUM, - number=2, - enum=gct_task.Task.View, - ) - page_size = proto.Field( - proto.INT32, - number=4, - ) - page_token = proto.Field( - proto.STRING, - number=5, - ) - - -class ListTasksResponse(proto.Message): - r"""Response message for listing tasks using - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - - Attributes: - tasks (Sequence[google.cloud.tasks_v2beta2.types.Task]): - The list of tasks. - next_page_token (str): - A token to retrieve next page of results. - - To return the next page of results, call - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] - with this value as the - [page_token][google.cloud.tasks.v2beta2.ListTasksRequest.page_token]. - - If the next_page_token is empty, there are no more results. 
- """ - - @property - def raw_page(self): - return self - - tasks = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gct_task.Task, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetTaskRequest(proto.Message): - r"""Request message for getting a task using - [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. - - Attributes: - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (google.cloud.tasks_v2beta2.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. - - By default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - response_view = proto.Field( - proto.ENUM, - number=2, - enum=gct_task.Task.View, - ) - - -class CreateTaskRequest(proto.Message): - r"""Request message for - [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. - - Attributes: - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - The queue must already exist. - task (google.cloud.tasks_v2beta2.types.Task): - Required. The task to add. - - Task names have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. - The user can optionally specify a task - [name][google.cloud.tasks.v2beta2.Task.name]. 
If a name is - not specified then the system will generate a random unique - task id, which will be set in the task returned in the - [response][google.cloud.tasks.v2beta2.Task.name]. - - If - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - is not set or is in the past then Cloud Tasks will set it to - the current time. - - Task De-duplication: - - Explicitly specifying a task ID enables task de-duplication. - If a task's ID is identical to that of an existing task or a - task that was deleted or completed recently then the call - will fail with - [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the - task's queue was created using Cloud Tasks, then another - task with the same name can't be created for ~1hour after - the original task was deleted or completed. If the task's - queue was created using queue.yaml or queue.xml, then - another task with the same name can't be created for ~9days - after the original task was deleted or completed. - - Because there is an extra lookup cost to identify duplicate - task names, these - [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] - calls have significantly increased latency. Using hashed - strings for the task id or for the prefix of the task id is - recommended. Choosing task ids that are sequential or have - sequential prefixes, for example using a timestamp, causes - an increase in latency and error rates in all task commands. - The infrastructure relies on an approximately uniform - distribution of task ids to store and serve tasks - efficiently. - response_view (google.cloud.tasks_v2beta2.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. 
- - By default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - task = proto.Field( - proto.MESSAGE, - number=2, - message=gct_task.Task, - ) - response_view = proto.Field( - proto.ENUM, - number=3, - enum=gct_task.Task.View, - ) - - -class DeleteTaskRequest(proto.Message): - r"""Request message for deleting a task using - [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. - - Attributes: - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class LeaseTasksRequest(proto.Message): - r"""Request message for leasing tasks using - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. - - Attributes: - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - max_tasks (int): - The maximum number of tasks to lease. - - The system will make a best effort to return as close to as - ``max_tasks`` as possible. - - The largest that ``max_tasks`` can be is 1000. - - The maximum total size of a [lease tasks - response][google.cloud.tasks.v2beta2.LeaseTasksResponse] is - 32 MB. If the sum of all task sizes requested reaches this - limit, fewer tasks than requested are returned. - lease_duration (google.protobuf.duration_pb2.Duration): - Required. The duration of the lease. 
- - Each task returned in the - [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] - will have its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - set to the current time plus the ``lease_duration``. The - task is leased until its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]; - thus, the task will not be returned to another - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - call before its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. - - After the worker has successfully finished the work - associated with the task, the worker must call via - [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] - before the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. - Otherwise the task will be returned to a later - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - call so that another worker can retry it. - - The maximum lease duration is 1 week. ``lease_duration`` - will be truncated to the nearest second. - response_view (google.cloud.tasks_v2beta2.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. - - By default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. - filter (str): - ``filter`` can be used to specify a subset of tasks to - lease. 
- - When ``filter`` is set to ``tag=`` then the - [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] - will contain only tasks whose - [tag][google.cloud.tasks.v2beta2.PullMessage.tag] is equal - to ````. ```` must be less than 500 - characters. - - When ``filter`` is set to ``tag_function=oldest_tag()``, - only tasks which have the same tag as the task with the - oldest - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - will be returned. - - Grammar Syntax: - - - ``filter = "tag=" tag | "tag_function=" function`` - - - ``tag = string`` - - - ``function = "oldest_tag()"`` - - The ``oldest_tag()`` function returns tasks which have the - same tag as the oldest task (ordered by schedule time). - - SDK compatibility: Although the SDK allows tags to be either - string or - `bytes `__, - only UTF-8 encoded tags can be used in Cloud Tasks. Tag - which aren't UTF-8 encoded can't be used in the - [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter] - and the task's - [tag][google.cloud.tasks.v2beta2.PullMessage.tag] will be - displayed as empty in Cloud Tasks. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - max_tasks = proto.Field( - proto.INT32, - number=2, - ) - lease_duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - response_view = proto.Field( - proto.ENUM, - number=4, - enum=gct_task.Task.View, - ) - filter = proto.Field( - proto.STRING, - number=5, - ) - - -class LeaseTasksResponse(proto.Message): - r"""Response message for leasing tasks using - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. - - Attributes: - tasks (Sequence[google.cloud.tasks_v2beta2.types.Task]): - The leased tasks. - """ - - tasks = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gct_task.Task, - ) - - -class AcknowledgeTaskRequest(proto.Message): - r"""Request message for acknowledging a task using - [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. 
- - Attributes: - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The task's current schedule time, available in the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - returned by - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - response or - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] - response. This restriction is to ensure that your worker - currently holds the lease. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - schedule_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class RenewLeaseRequest(proto.Message): - r"""Request message for renewing a lease using - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. - - Attributes: - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The task's current schedule time, available in the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - returned by - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - response or - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] - response. This restriction is to ensure that your worker - currently holds the lease. - lease_duration (google.protobuf.duration_pb2.Duration): - Required. The desired new lease duration, starting from now. - - The maximum lease duration is 1 week. ``lease_duration`` - will be truncated to the nearest second. - response_view (google.cloud.tasks_v2beta2.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. 
- - By default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - schedule_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - lease_duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - response_view = proto.Field( - proto.ENUM, - number=4, - enum=gct_task.Task.View, - ) - - -class CancelLeaseRequest(proto.Message): - r"""Request message for canceling a lease using - [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. - - Attributes: - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The task's current schedule time, available in the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - returned by - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - response or - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] - response. This restriction is to ensure that your worker - currently holds the lease. - response_view (google.cloud.tasks_v2beta2.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. 
- - By default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - schedule_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - response_view = proto.Field( - proto.ENUM, - number=3, - enum=gct_task.Task.View, - ) - - -class RunTaskRequest(proto.Message): - r"""Request message for forcing a task to run now using - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. - - Attributes: - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (google.cloud.tasks_v2beta2.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. - - By default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. 
- """ - - name = proto.Field( - proto.STRING, - number=1, - ) - response_view = proto.Field( - proto.ENUM, - number=2, - enum=gct_task.Task.View, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/queue.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/queue.py deleted file mode 100644 index 7473bc18..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/queue.py +++ /dev/null @@ -1,530 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.tasks_v2beta2.types import target -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.tasks.v2beta2', - manifest={ - 'Queue', - 'RateLimits', - 'RetryConfig', - 'QueueStats', - }, -) - - -class Queue(proto.Message): - r"""A queue is a container of related tasks. Queues are - configured to manage how those tasks are dispatched. - Configurable properties include rate limits, retry options, - target types, and others. - - Attributes: - name (str): - Caller-specified and required in - [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue], - after which it becomes output only. - - The queue name. 
- - The queue name must have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), colons (:), or periods (.). For - more information, see `Identifying - projects `__ - - ``LOCATION_ID`` is the canonical ID for the queue's - location. The list of available locations can be obtained - by calling - [ListLocations][google.cloud.location.Locations.ListLocations]. - For more information, see - https://cloud.google.com/about/locations/. - - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), or hyphens (-). The maximum length is 100 - characters. - app_engine_http_target (google.cloud.tasks_v2beta2.types.AppEngineHttpTarget): - App Engine HTTP target. - - An App Engine queue is a queue that has an - [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget]. - pull_target (google.cloud.tasks_v2beta2.types.PullTarget): - Pull target. - - A pull queue is a queue that has a - [PullTarget][google.cloud.tasks.v2beta2.PullTarget]. - rate_limits (google.cloud.tasks_v2beta2.types.RateLimits): - Rate limits for task dispatches. - - [rate_limits][google.cloud.tasks.v2beta2.Queue.rate_limits] - and - [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] - are related because they both control task attempts however - they control how tasks are attempted in different ways: - - - [rate_limits][google.cloud.tasks.v2beta2.Queue.rate_limits] - controls the total rate of dispatches from a queue (i.e. - all traffic dispatched from the queue, regardless of - whether the dispatch is from a first attempt or a retry). - - [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] - controls what happens to particular a task after its - first attempt fails. That is, - [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] - controls task retries (the second attempt, third attempt, - etc). 
- retry_config (google.cloud.tasks_v2beta2.types.RetryConfig): - Settings that determine the retry behavior. - - - For tasks created using Cloud Tasks: the queue-level - retry settings apply to all tasks in the queue that were - created using Cloud Tasks. Retry settings cannot be set - on individual tasks. - - For tasks created using the App Engine SDK: the - queue-level retry settings apply to all tasks in the - queue which do not have retry settings explicitly set on - the task and were created by the App Engine SDK. See `App - Engine - documentation `__. - state (google.cloud.tasks_v2beta2.types.Queue.State): - Output only. The state of the queue. - - ``state`` can only be changed by calling - [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue], - [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue], - or uploading - `queue.yaml/xml `__. - [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] - cannot be used to change ``state``. - purge_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last time this queue was purged. - - All tasks that were - [created][google.cloud.tasks.v2beta2.Task.create_time] - before this time were purged. - - A queue can be purged using - [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue], - the `App Engine Task Queue SDK, or the Cloud - Console `__. - - Purge time will be truncated to the nearest microsecond. - Purge time will be unset if the queue has never been purged. - task_ttl (google.protobuf.duration_pb2.Duration): - The maximum amount of time that a task will be retained in - this queue. - - Queues created by Cloud Tasks have a default ``task_ttl`` of - 31 days. After a task has lived for ``task_ttl``, the task - will be deleted regardless of whether it was dispatched or - not. - - The ``task_ttl`` for queues created via queue.yaml/xml is - equal to the maximum duration because there is a `storage - quota `__ - for these queues. 
To view the maximum valid duration, see - the documentation for [Duration][google.protobuf.Duration]. - tombstone_ttl (google.protobuf.duration_pb2.Duration): - The task tombstone time to live (TTL). - - After a task is deleted or completed, the task's tombstone - is retained for the length of time specified by - ``tombstone_ttl``. The tombstone is used by task - de-duplication; another task with the same name can't be - created until the tombstone has expired. For more - information about task de-duplication, see the documentation - for - [CreateTaskRequest][google.cloud.tasks.v2beta2.CreateTaskRequest.task]. - - Queues created by Cloud Tasks have a default - ``tombstone_ttl`` of 1 hour. - stats (google.cloud.tasks_v2beta2.types.QueueStats): - Output only. The realtime, informational - statistics for a queue. In order to receive the - statistics the caller should include this field - in the FieldMask. - """ - class State(proto.Enum): - r"""State of the queue.""" - STATE_UNSPECIFIED = 0 - RUNNING = 1 - PAUSED = 2 - DISABLED = 3 - - name = proto.Field( - proto.STRING, - number=1, - ) - app_engine_http_target = proto.Field( - proto.MESSAGE, - number=3, - oneof='target_type', - message=target.AppEngineHttpTarget, - ) - pull_target = proto.Field( - proto.MESSAGE, - number=4, - oneof='target_type', - message=target.PullTarget, - ) - rate_limits = proto.Field( - proto.MESSAGE, - number=5, - message='RateLimits', - ) - retry_config = proto.Field( - proto.MESSAGE, - number=6, - message='RetryConfig', - ) - state = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - purge_time = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - task_ttl = proto.Field( - proto.MESSAGE, - number=9, - message=duration_pb2.Duration, - ) - tombstone_ttl = proto.Field( - proto.MESSAGE, - number=10, - message=duration_pb2.Duration, - ) - stats = proto.Field( - proto.MESSAGE, - number=16, - message='QueueStats', - ) - - -class RateLimits(proto.Message): - 
r"""Rate limits. - - This message determines the maximum rate that tasks can be - dispatched by a queue, regardless of whether the dispatch is a first - task attempt or a retry. - - Note: The debugging command, - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask], will run a - task even if the queue has reached its - [RateLimits][google.cloud.tasks.v2beta2.RateLimits]. - - Attributes: - max_tasks_dispatched_per_second (float): - The maximum rate at which tasks are dispatched from this - queue. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - - For [App Engine - queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], - the maximum allowed value is 500. - - This field is output only for [pull - queues][google.cloud.tasks.v2beta2.PullTarget]. In - addition to the ``max_tasks_dispatched_per_second`` - limit, a maximum of 10 QPS of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - requests are allowed per pull queue. - - This field has the same meaning as `rate in - queue.yaml/xml `__. - max_burst_size (int): - The max burst size. - - Max burst size limits how fast tasks in queue are processed - when many tasks are in the queue and the rate is high. This - field allows the queue to have a high rate so processing - starts shortly after a task is enqueued, but still limits - resource usage when many tasks are enqueued in a short - period of time. - - The `token - bucket `__ - algorithm is used to control the rate of task dispatches. - Each queue has a token bucket that holds tokens, up to the - maximum specified by ``max_burst_size``. Each time a task is - dispatched, a token is removed from the bucket. Tasks will - be dispatched until the queue's bucket runs out of tokens. - The bucket will be continuously refilled with new tokens - based on - [max_dispatches_per_second][RateLimits.max_dispatches_per_second]. 
- - The default value of ``max_burst_size`` is picked by Cloud - Tasks based on the value of - [max_dispatches_per_second][RateLimits.max_dispatches_per_second]. - - The maximum value of ``max_burst_size`` is 500. - - For App Engine queues that were created or updated using - ``queue.yaml/xml``, ``max_burst_size`` is equal to - `bucket_size `__. - If - [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] - is called on a queue without explicitly setting a value for - ``max_burst_size``, ``max_burst_size`` value will get - updated if - [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] - is updating - [max_dispatches_per_second][RateLimits.max_dispatches_per_second]. - max_concurrent_tasks (int): - The maximum number of concurrent tasks that Cloud Tasks - allows to be dispatched for this queue. After this threshold - has been reached, Cloud Tasks stops dispatching tasks until - the number of concurrent requests decreases. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - The maximum allowed value is 5,000. - - This field is output only for [pull - queues][google.cloud.tasks.v2beta2.PullTarget] and always - -1, which indicates no limit. No other queue types can have - ``max_concurrent_tasks`` set to -1. - - This field has the same meaning as `max_concurrent_requests - in - queue.yaml/xml `__. - """ - - max_tasks_dispatched_per_second = proto.Field( - proto.DOUBLE, - number=1, - ) - max_burst_size = proto.Field( - proto.INT32, - number=2, - ) - max_concurrent_tasks = proto.Field( - proto.INT32, - number=3, - ) - - -class RetryConfig(proto.Message): - r"""Retry config. - These settings determine how a failed task attempt is retried. - - Attributes: - max_attempts (int): - The maximum number of attempts for a task. - - Cloud Tasks will attempt the task ``max_attempts`` times - (that is, if the first attempt fails, then there will be - ``max_attempts - 1`` retries). Must be > 0. 
- unlimited_attempts (bool): - If true, then the number of attempts is - unlimited. - max_retry_duration (google.protobuf.duration_pb2.Duration): - If positive, ``max_retry_duration`` specifies the time limit - for retrying a failed task, measured from when the task was - first attempted. Once ``max_retry_duration`` time has passed - *and* the task has been attempted - [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts] - times, no further attempts will be made and the task will be - deleted. - - If zero, then the task age is unlimited. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - This field is output only for [pull - queues][google.cloud.tasks.v2beta2.PullTarget]. - - ``max_retry_duration`` will be truncated to the nearest - second. - - This field has the same meaning as `task_age_limit in - queue.yaml/xml `__. - min_backoff (google.protobuf.duration_pb2.Duration): - A task will be - [scheduled][google.cloud.tasks.v2beta2.Task.schedule_time] - for retry between - [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff] - and - [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] - duration after it fails, if the queue's - [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig] - specifies that the task should be retried. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - This field is output only for [pull - queues][google.cloud.tasks.v2beta2.PullTarget]. - - ``min_backoff`` will be truncated to the nearest second. - - This field has the same meaning as `min_backoff_seconds in - queue.yaml/xml `__. 
- max_backoff (google.protobuf.duration_pb2.Duration): - A task will be - [scheduled][google.cloud.tasks.v2beta2.Task.schedule_time] - for retry between - [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff] - and - [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] - duration after it fails, if the queue's - [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig] - specifies that the task should be retried. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - This field is output only for [pull - queues][google.cloud.tasks.v2beta2.PullTarget]. - - ``max_backoff`` will be truncated to the nearest second. - - This field has the same meaning as `max_backoff_seconds in - queue.yaml/xml `__. - max_doublings (int): - The time between retries will double ``max_doublings`` - times. - - A task's retry interval starts at - [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff], - then doubles ``max_doublings`` times, then increases - linearly, and finally retries at intervals of - [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] - up to - [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts] - times. - - For example, if - [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff] - is 10s, - [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] - is 300s, and ``max_doublings`` is 3, then the a task will - first be retried in 10s. The retry interval will double - three times, and then increase linearly by 2^3 \* 10s. - Finally, the task will retry at intervals of - [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] - until the task has been attempted - [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts] - times. Thus, the requests will retry at 10s, 20s, 40s, 80s, - 160s, 240s, 300s, 300s, .... - - If unspecified when the queue is created, Cloud Tasks will - pick the default. 
- - This field is output only for [pull - queues][google.cloud.tasks.v2beta2.PullTarget]. - - This field has the same meaning as `max_doublings in - queue.yaml/xml `__. - """ - - max_attempts = proto.Field( - proto.INT32, - number=1, - oneof='num_attempts', - ) - unlimited_attempts = proto.Field( - proto.BOOL, - number=2, - oneof='num_attempts', - ) - max_retry_duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - min_backoff = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - max_backoff = proto.Field( - proto.MESSAGE, - number=5, - message=duration_pb2.Duration, - ) - max_doublings = proto.Field( - proto.INT32, - number=6, - ) - - -class QueueStats(proto.Message): - r"""Statistics for a queue. - Attributes: - tasks_count (int): - Output only. An estimation of the number of - tasks in the queue, that is, the tasks in the - queue that haven't been executed, the tasks in - the queue which the queue has dispatched but has - not yet received a reply for, and the failed - tasks that the queue is retrying. - oldest_estimated_arrival_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. An estimation of the nearest - time in the future where a task in the queue is - scheduled to be executed. - executed_last_minute_count (int): - Output only. The number of tasks that the - queue has dispatched and received a reply for - during the last minute. This variable counts - both successful and non-successful executions. - concurrent_dispatches_count (int): - Output only. The number of requests that the - queue has dispatched but has not received a - reply for yet. - effective_execution_rate (float): - Output only. The current maximum number of - tasks per second executed by the queue. The - maximum value of this variable is controlled by - the RateLimits of the Queue. However, this value - could be less to avoid overloading the endpoints - tasks in the queue are targeting. 
- """ - - tasks_count = proto.Field( - proto.INT64, - number=1, - ) - oldest_estimated_arrival_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - executed_last_minute_count = proto.Field( - proto.INT64, - number=3, - ) - concurrent_dispatches_count = proto.Field( - proto.INT64, - number=4, - ) - effective_execution_rate = proto.Field( - proto.DOUBLE, - number=5, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/target.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/target.py deleted file mode 100644 index fa3f9557..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/target.py +++ /dev/null @@ -1,487 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.tasks.v2beta2', - manifest={ - 'HttpMethod', - 'PullTarget', - 'PullMessage', - 'AppEngineHttpTarget', - 'AppEngineHttpRequest', - 'AppEngineRouting', - }, -) - - -class HttpMethod(proto.Enum): - r"""The HTTP method used to execute the task.""" - HTTP_METHOD_UNSPECIFIED = 0 - POST = 1 - GET = 2 - HEAD = 3 - PUT = 4 - DELETE = 5 - - -class PullTarget(proto.Message): - r"""Pull target. 
""" - - -class PullMessage(proto.Message): - r"""The pull message contains data that can be used by the caller of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] to - process the task. - - This proto can only be used for tasks in a queue which has - [pull_target][google.cloud.tasks.v2beta2.Queue.pull_target] set. - - Attributes: - payload (bytes): - A data payload consumed by the worker to - execute the task. - tag (str): - The task's tag. - - Tags allow similar tasks to be processed in a batch. If you - label tasks with a tag, your worker can [lease - tasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - with the same tag using - [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter]. - For example, if you want to aggregate the events associated - with a specific user once a day, you could tag tasks with - the user ID. - - The task's tag can only be set when the [task is - created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. - - The tag must be less than 500 characters. - - SDK compatibility: Although the SDK allows tags to be either - string or - `bytes `__, - only UTF-8 encoded tags can be used in Cloud Tasks. If a tag - isn't UTF-8 encoded, the tag will be empty when the task is - returned by Cloud Tasks. - """ - - payload = proto.Field( - proto.BYTES, - number=1, - ) - tag = proto.Field( - proto.STRING, - number=2, - ) - - -class AppEngineHttpTarget(proto.Message): - r"""App Engine HTTP target. - - The task will be delivered to the App Engine application hostname - specified by its - [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] - and - [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest]. - The documentation for - [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] - explains how the task's host URL is constructed. 
- - Using - [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] - requires - ```appengine.applications.get`` `__ - Google IAM permission for the project and the following scope: - - ``https://www.googleapis.com/auth/cloud-platform`` - - Attributes: - app_engine_routing_override (google.cloud.tasks_v2beta2.types.AppEngineRouting): - Overrides for the [task-level - app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. - - If set, ``app_engine_routing_override`` is used for all - tasks in the queue, no matter what the setting is for the - [task-level - app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. - """ - - app_engine_routing_override = proto.Field( - proto.MESSAGE, - number=1, - message='AppEngineRouting', - ) - - -class AppEngineHttpRequest(proto.Message): - r"""App Engine HTTP request. - - The message defines the HTTP request that is sent to an App Engine - app when the task is dispatched. - - This proto can only be used for tasks in a queue which has - [app_engine_http_target][google.cloud.tasks.v2beta2.Queue.app_engine_http_target] - set. - - Using - [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] - requires - ```appengine.applications.get`` `__ - Google IAM permission for the project and the following scope: - - ``https://www.googleapis.com/auth/cloud-platform`` - - The task will be delivered to the App Engine app which belongs to - the same project as the queue. For more information, see `How - Requests are - Routed `__ - and how routing is affected by `dispatch - files `__. - Traffic is encrypted during transport and never leaves Google - datacenters. Because this traffic is carried over a communication - mechanism internal to Google, you cannot explicitly set the protocol - (for example, HTTP or HTTPS). The request to the handler, however, - will appear to have used the HTTP protocol. 
- - The [AppEngineRouting][google.cloud.tasks.v2beta2.AppEngineRouting] - used to construct the URL that the task is delivered to can be set - at the queue-level or task-level: - - - If set, - [app_engine_routing_override][google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_override] - is used for all tasks in the queue, no matter what the setting is - for the [task-level - app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. - - The ``url`` that the task will be sent to is: - - - ``url =`` - [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] ``+`` - [relative_url][google.cloud.tasks.v2beta2.AppEngineHttpRequest.relative_url] - - Tasks can be dispatched to secure app handlers, unsecure app - handlers, and URIs restricted with - ```login: admin`` `__. - Because tasks are not run as any user, they cannot be dispatched to - URIs restricted with - ```login: required`` `__ - Task dispatches also do not follow redirects. - - The task attempt has succeeded if the app's request handler returns - an HTTP response code in the range [``200`` - ``299``]. The task - attempt has failed if the app's handler returns a non-2xx response - code or Cloud Tasks does not receive response before the - [deadline][Task.dispatch_deadline]. Failed tasks will be retried - according to the [retry - configuration][google.cloud.tasks.v2beta2.Queue.retry_config]. - ``503`` (Service Unavailable) is considered an App Engine system - error instead of an application error and will cause Cloud Tasks' - traffic congestion control to temporarily throttle the queue's - dispatches. Unlike other types of task targets, a ``429`` (Too Many - Requests) response from an app handler does not cause traffic - congestion control to throttle the queue. - - Attributes: - http_method (google.cloud.tasks_v2beta2.types.HttpMethod): - The HTTP method to use for the request. The default is POST. 
- - The app's request handler for the task's target URL must be - able to handle HTTP requests with this http_method, - otherwise the task attempt fails with error code 405 (Method - Not Allowed). See `Writing a push task request - handler `__ - and the App Engine documentation for your runtime on `How - Requests are - Handled `__. - app_engine_routing (google.cloud.tasks_v2beta2.types.AppEngineRouting): - Task-level setting for App Engine routing. - - If set, - [app_engine_routing_override][google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_override] - is used for all tasks in the queue, no matter what the - setting is for the [task-level - app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. - relative_url (str): - The relative URL. - The relative URL must begin with "/" and must be - a valid HTTP relative URL. It can contain a path - and query string arguments. If the relative URL - is empty, then the root path "/" will be used. - No spaces are allowed, and the maximum length - allowed is 2083 characters. - headers (Sequence[google.cloud.tasks_v2beta2.types.AppEngineHttpRequest.HeadersEntry]): - HTTP request headers. - - This map contains the header field names and values. Headers - can be set when the [task is - created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. - Repeated headers are not supported but a header value can - contain commas. - - Cloud Tasks sets some headers to default values: - - - ``User-Agent``: By default, this header is - ``"AppEngine-Google; (+http://code.google.com/appengine)"``. - This header can be modified, but Cloud Tasks will append - ``"AppEngine-Google; (+http://code.google.com/appengine)"`` - to the modified ``User-Agent``. - - If the task has a - [payload][google.cloud.tasks.v2beta2.AppEngineHttpRequest.payload], - Cloud Tasks sets the following headers: - - - ``Content-Type``: By default, the ``Content-Type`` header - is set to ``"application/octet-stream"``. 
The default can - be overridden by explicitly setting ``Content-Type`` to a - particular media type when the [task is - created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. - For example, ``Content-Type`` can be set to - ``"application/json"``. - - ``Content-Length``: This is computed by Cloud Tasks. This - value is output only. It cannot be changed. - - The headers below cannot be set or overridden: - - - ``Host`` - - ``X-Google-*`` - - ``X-AppEngine-*`` - - In addition, Cloud Tasks sets some headers when the task is - dispatched, such as headers containing information about the - task; see `request - headers `__. - These headers are set only when the task is dispatched, so - they are not visible when the task is returned in a Cloud - Tasks response. - - Although there is no specific limit for the maximum number - of headers or the size, there is a limit on the maximum size - of the [Task][google.cloud.tasks.v2beta2.Task]. For more - information, see the - [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] - documentation. - payload (bytes): - Payload. - - The payload will be sent as the HTTP message body. A message - body, and thus a payload, is allowed only if the HTTP method - is POST or PUT. It is an error to set a data payload on a - task with an incompatible - [HttpMethod][google.cloud.tasks.v2beta2.HttpMethod]. - """ - - http_method = proto.Field( - proto.ENUM, - number=1, - enum='HttpMethod', - ) - app_engine_routing = proto.Field( - proto.MESSAGE, - number=2, - message='AppEngineRouting', - ) - relative_url = proto.Field( - proto.STRING, - number=3, - ) - headers = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - payload = proto.Field( - proto.BYTES, - number=5, - ) - - -class AppEngineRouting(proto.Message): - r"""App Engine Routing. - - Defines routing characteristics specific to App Engine - service, - version, and instance. 
- - For more information about services, versions, and instances see `An - Overview of App - Engine `__, - `Microservices Architecture on Google App - Engine `__, - `App Engine Standard request - routing `__, - and `App Engine Flex request - routing `__. - - Attributes: - service (str): - App service. - - By default, the task is sent to the service which is the - default service when the task is attempted. - - For some queues or tasks which were created using the App - Engine Task Queue API, - [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is - not parsable into - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance]. - For example, some tasks which were created using the App - Engine SDK use a custom domain name; custom domains are not - parsed by Cloud Tasks. If - [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is - not parsable, then - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] - are the empty string. - version (str): - App version. - - By default, the task is sent to the version which is the - default version when the task is attempted. - - For some queues or tasks which were created using the App - Engine Task Queue API, - [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is - not parsable into - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance]. - For example, some tasks which were created using the App - Engine SDK use a custom domain name; custom domains are not - parsed by Cloud Tasks. 
If - [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is - not parsable, then - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] - are the empty string. - instance (str): - App instance. - - By default, the task is sent to an instance which is - available when the task is attempted. - - Requests can only be sent to a specific instance if `manual - scaling is used in App Engine - Standard `__. - App Engine Flex does not support instances. For more - information, see `App Engine Standard request - routing `__ - and `App Engine Flex request - routing `__. - host (str): - Output only. The host that the task is sent to. - - For more information, see `How Requests are - Routed `__. - - The host is constructed as: - - - ``host = [application_domain_name]``\ - ``| [service] + '.' + [application_domain_name]``\ - ``| [version] + '.' + [application_domain_name]``\ - ``| [version_dot_service]+ '.' + [application_domain_name]``\ - ``| [instance] + '.' + [application_domain_name]``\ - ``| [instance_dot_service] + '.' + [application_domain_name]``\ - ``| [instance_dot_version] + '.' + [application_domain_name]``\ - ``| [instance_dot_version_dot_service] + '.' + [application_domain_name]`` - - - ``application_domain_name`` = The domain name of the app, - for example .appspot.com, which is associated with the - queue's project ID. Some tasks which were created using - the App Engine SDK use a custom domain name. - - - ``service =`` - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] - - - ``version =`` - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] - - - ``version_dot_service =`` - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] - ``+ '.' 
+`` - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] - - - ``instance =`` - [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] - - - ``instance_dot_service =`` - [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] - ``+ '.' +`` - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] - - - ``instance_dot_version =`` - [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] - ``+ '.' +`` - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] - - - ``instance_dot_version_dot_service =`` - [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] - ``+ '.' +`` - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] - ``+ '.' +`` - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] - - If - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] - is empty, then the task will be sent to the service which is - the default service when the task is attempted. - - If - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] - is empty, then the task will be sent to the version which is - the default version when the task is attempted. - - If - [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] - is empty, then the task will be sent to an instance which is - available when the task is attempted. - - If - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], - or - [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] - is invalid, then the task will be sent to the default - version of the default service when the task is attempted. 
- """ - - service = proto.Field( - proto.STRING, - number=1, - ) - version = proto.Field( - proto.STRING, - number=2, - ) - instance = proto.Field( - proto.STRING, - number=3, - ) - host = proto.Field( - proto.STRING, - number=4, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/task.py b/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/task.py deleted file mode 100644 index 801ebc72..00000000 --- a/owl-bot-staging/v2beta2/google/cloud/tasks_v2beta2/types/task.py +++ /dev/null @@ -1,254 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.tasks_v2beta2.types import target -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.tasks.v2beta2', - manifest={ - 'Task', - 'TaskStatus', - 'AttemptStatus', - }, -) - - -class Task(proto.Message): - r"""A unit of scheduled work. - Attributes: - name (str): - Optionally caller-specified in - [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. - - The task name. - - The task name must have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), colons (:), or periods (.). 
For - more information, see `Identifying - projects `__ - - ``LOCATION_ID`` is the canonical ID for the task's - location. The list of available locations can be obtained - by calling - [ListLocations][google.cloud.location.Locations.ListLocations]. - For more information, see - https://cloud.google.com/about/locations/. - - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), or hyphens (-). The maximum length is 100 - characters. - - ``TASK_ID`` can contain only letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), or underscores (_). The maximum - length is 500 characters. - app_engine_http_request (google.cloud.tasks_v2beta2.types.AppEngineHttpRequest): - App Engine HTTP request that is sent to the task's target. - Can be set only if - [app_engine_http_target][google.cloud.tasks.v2beta2.Queue.app_engine_http_target] - is set on the queue. - - An App Engine task is a task that has - [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] - set. - pull_message (google.cloud.tasks_v2beta2.types.PullMessage): - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - to process the task. Can be set only if - [pull_target][google.cloud.tasks.v2beta2.Queue.pull_target] - is set on the queue. - - A pull task is a task that has - [PullMessage][google.cloud.tasks.v2beta2.PullMessage] set. - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the task is scheduled to be attempted. - - For App Engine queues, this is when the task will be - attempted or retried. - - For pull queues, this is the time when the task is available - to be leased; if a task is currently leased, this is the - time when the current lease expires, that is, the time that - the task was leased plus the - [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. - - ``schedule_time`` will be truncated to the nearest - microsecond. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
The time that the task was created. - - ``create_time`` will be truncated to the nearest second. - status (google.cloud.tasks_v2beta2.types.TaskStatus): - Output only. The task status. - view (google.cloud.tasks_v2beta2.types.Task.View): - Output only. The view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] has been returned. - """ - class View(proto.Enum): - r"""The view specifies a subset of - [Task][google.cloud.tasks.v2beta2.Task] data. - - When a task is returned in a response, not all information is - retrieved by default because some data, such as payloads, might be - desirable to return only when needed because of its large size or - because of the sensitivity of data that it contains. - """ - VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 - - name = proto.Field( - proto.STRING, - number=1, - ) - app_engine_http_request = proto.Field( - proto.MESSAGE, - number=3, - oneof='payload_type', - message=target.AppEngineHttpRequest, - ) - pull_message = proto.Field( - proto.MESSAGE, - number=4, - oneof='payload_type', - message=target.PullMessage, - ) - schedule_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - create_time = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - status = proto.Field( - proto.MESSAGE, - number=7, - message='TaskStatus', - ) - view = proto.Field( - proto.ENUM, - number=8, - enum=View, - ) - - -class TaskStatus(proto.Message): - r"""Status of the task. - Attributes: - attempt_dispatch_count (int): - Output only. The number of attempts - dispatched. - This count includes attempts which have been - dispatched but haven't received a response. - attempt_response_count (int): - Output only. The number of attempts which have received a - response. - - This field is not calculated for [pull - tasks][google.cloud.tasks.v2beta2.PullMessage]. - first_attempt_status (google.cloud.tasks_v2beta2.types.AttemptStatus): - Output only. 
The status of the task's first attempt. - - Only - [dispatch_time][google.cloud.tasks.v2beta2.AttemptStatus.dispatch_time] - will be set. The other - [AttemptStatus][google.cloud.tasks.v2beta2.AttemptStatus] - information is not retained by Cloud Tasks. - - This field is not calculated for [pull - tasks][google.cloud.tasks.v2beta2.PullMessage]. - last_attempt_status (google.cloud.tasks_v2beta2.types.AttemptStatus): - Output only. The status of the task's last attempt. - - This field is not calculated for [pull - tasks][google.cloud.tasks.v2beta2.PullMessage]. - """ - - attempt_dispatch_count = proto.Field( - proto.INT32, - number=1, - ) - attempt_response_count = proto.Field( - proto.INT32, - number=2, - ) - first_attempt_status = proto.Field( - proto.MESSAGE, - number=3, - message='AttemptStatus', - ) - last_attempt_status = proto.Field( - proto.MESSAGE, - number=4, - message='AttemptStatus', - ) - - -class AttemptStatus(proto.Message): - r"""The status of a task attempt. - Attributes: - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time that this attempt was scheduled. - - ``schedule_time`` will be truncated to the nearest - microsecond. - dispatch_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time that this attempt was dispatched. - - ``dispatch_time`` will be truncated to the nearest - microsecond. - response_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time that this attempt response was - received. - - ``response_time`` will be truncated to the nearest - microsecond. - response_status (google.rpc.status_pb2.Status): - Output only. The response from the target for - this attempt. - If the task has not been attempted or the task - is currently running then the response status is - unset. 
- """ - - schedule_time = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - dispatch_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - response_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - response_status = proto.Field( - proto.MESSAGE, - number=4, - message=status_pb2.Status, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta2/mypy.ini b/owl-bot-staging/v2beta2/mypy.ini deleted file mode 100644 index 4505b485..00000000 --- a/owl-bot-staging/v2beta2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/owl-bot-staging/v2beta2/noxfile.py b/owl-bot-staging/v2beta2/noxfile.py deleted file mode 100644 index 943cc28c..00000000 --- a/owl-bot-staging/v2beta2/noxfile.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", -] - -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/tasks_v2beta2/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python='3.7') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=['3.6', '3.7']) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python='3.6') -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/owl-bot-staging/v2beta2/scripts/fixup_tasks_v2beta2_keywords.py b/owl-bot-staging/v2beta2/scripts/fixup_tasks_v2beta2_keywords.py deleted file mode 100644 index 4b2201b4..00000000 --- a/owl-bot-staging/v2beta2/scripts/fixup_tasks_v2beta2_keywords.py +++ /dev/null @@ -1,195 +0,0 @@ -#! 
/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class tasksCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'acknowledge_task': ('name', 'schedule_time', ), - 'cancel_lease': ('name', 'schedule_time', 'response_view', ), - 'create_queue': ('parent', 'queue', ), - 'create_task': ('parent', 'task', 'response_view', ), - 'delete_queue': ('name', ), - 'delete_task': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_queue': ('name', 'read_mask', ), - 'get_task': ('name', 'response_view', ), - 'lease_tasks': ('parent', 'lease_duration', 'max_tasks', 'response_view', 'filter', ), - 'list_queues': ('parent', 'filter', 'page_size', 'page_token', 'read_mask', ), - 'list_tasks': ('parent', 'response_view', 'page_size', 'page_token', ), - 'pause_queue': ('name', ), - 'purge_queue': ('name', ), - 'renew_lease': ('name', 'schedule_time', 
'lease_duration', 'response_view', ), - 'resume_queue': ('name', ), - 'run_task': ('name', 'response_view', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_queue': ('queue', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=tasksCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. 
- - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the tasks client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2beta2/setup.py b/owl-bot-staging/v2beta2/setup.py deleted file mode 100644 index 2254eec1..00000000 --- a/owl-bot-staging/v2beta2/setup.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-tasks', - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', - 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff --git a/owl-bot-staging/v2beta2/tests/__init__.py b/owl-bot-staging/v2beta2/tests/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2beta2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2beta2/tests/unit/__init__.py b/owl-bot-staging/v2beta2/tests/unit/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2beta2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2beta2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2beta2/tests/unit/gapic/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2beta2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/__init__.py b/owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py b/owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py deleted file mode 100644 index d2461be9..00000000 --- a/owl-bot-staging/v2beta2/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py +++ /dev/null @@ -1,6121 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient -from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksClient -from google.cloud.tasks_v2beta2.services.cloud_tasks import pagers -from google.cloud.tasks_v2beta2.services.cloud_tasks import transports -from google.cloud.tasks_v2beta2.services.cloud_tasks.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.tasks_v2beta2.types import cloudtasks -from google.cloud.tasks_v2beta2.types import queue -from google.cloud.tasks_v2beta2.types import queue as gct_queue -from google.cloud.tasks_v2beta2.types import target -from google.cloud.tasks_v2beta2.types import task -from google.cloud.tasks_v2beta2.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete 
these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert CloudTasksClient._get_default_mtls_endpoint(None) is None - assert CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - CloudTasksClient, - CloudTasksAsyncClient, -]) -def test_cloud_tasks_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 
'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'cloudtasks.googleapis.com:443' - - -@pytest.mark.parametrize("client_class", [ - CloudTasksClient, - CloudTasksAsyncClient, -]) -def test_cloud_tasks_client_service_account_always_use_jwt(client_class): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_called_with(True) - - -@pytest.mark.parametrize("client_class", [ - CloudTasksClient, - CloudTasksAsyncClient, -]) -def test_cloud_tasks_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'cloudtasks.googleapis.com:443' - - -def test_cloud_tasks_client_get_transport_class(): - transport = CloudTasksClient.get_transport_class() - available_transports = [ - transports.CloudTasksGrpcTransport, - ] - assert transport in available_transports - - transport = CloudTasksClient.get_transport_class("grpc") - assert transport == transports.CloudTasksGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), - (CloudTasksAsyncClient, 
transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)) -@mock.patch.object(CloudTasksAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksAsyncClient)) -def test_cloud_tasks_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(CloudTasksClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CloudTasksClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "true"), - (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "false"), - (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)) -@mock.patch.object(CloudTasksAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_cloud_tasks_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), - (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_cloud_tasks_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), - (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_cloud_tasks_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - -def test_cloud_tasks_client_client_options_from_dict(): - with mock.patch('google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = CloudTasksClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - -def test_list_queues(transport: str = 'grpc', request_type=cloudtasks.ListQueuesRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListQueuesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_queues(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListQueuesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListQueuesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_queues_from_dict(): - test_list_queues(request_type=dict) - - -def test_list_queues_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - client.list_queues() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListQueuesRequest() - - -@pytest.mark.asyncio -async def test_list_queues_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ListQueuesRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_queues(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListQueuesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListQueuesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_queues_async_from_dict(): - await test_list_queues_async(request_type=dict) - - -def test_list_queues_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ListQueuesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - call.return_value = cloudtasks.ListQueuesResponse() - client.list_queues(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_queues_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ListQueuesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse()) - await client.list_queues(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_queues_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListQueuesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_queues( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_queues_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_queues( - cloudtasks.ListQueuesRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_queues_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListQueuesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_queues( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_queues_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_queues( - cloudtasks.ListQueuesRequest(), - parent='parent_value', - ) - - -def test_list_queues_pager(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - queue.Queue(), - ], - next_page_token='abc', - ), - cloudtasks.ListQueuesResponse( - queues=[], - next_page_token='def', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - ], - next_page_token='ghi', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_queues(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, queue.Queue) - for i in results) - -def test_list_queues_pages(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - queue.Queue(), - ], - next_page_token='abc', - ), - cloudtasks.ListQueuesResponse( - queues=[], - next_page_token='def', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - ], - next_page_token='ghi', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - ], - ), - RuntimeError, - ) - pages = list(client.list_queues(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_queues_async_pager(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_queues), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - queue.Queue(), - ], - next_page_token='abc', - ), - cloudtasks.ListQueuesResponse( - queues=[], - next_page_token='def', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - ], - next_page_token='ghi', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_queues(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, queue.Queue) - for i in responses) - -@pytest.mark.asyncio -async def test_list_queues_async_pages(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_queues), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - queue.Queue(), - ], - next_page_token='abc', - ), - cloudtasks.ListQueuesResponse( - queues=[], - next_page_token='def', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - ], - next_page_token='ghi', - ), - cloudtasks.ListQueuesResponse( - queues=[ - queue.Queue(), - queue.Queue(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_queues(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_queue(transport: str = 'grpc', request_type=cloudtasks.GetQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - app_engine_http_target=target.AppEngineHttpTarget(app_engine_routing_override=target.AppEngineRouting(service='service_value')), - ) - response = client.get_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetQueueRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -def test_get_queue_from_dict(): - test_get_queue(request_type=dict) - - -def test_get_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - client.get_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetQueueRequest() - - -@pytest.mark.asyncio -async def test_get_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.GetQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - )) - response = await client.get_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetQueueRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -@pytest.mark.asyncio -async def test_get_queue_async_from_dict(): - await test_get_queue_async(request_type=dict) - - -def test_get_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.GetQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - call.return_value = queue.Queue() - client.get_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.GetQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - await client.get_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_queue( - cloudtasks.GetQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_queue( - cloudtasks.GetQueueRequest(), - name='name_value', - ) - - -def test_create_queue(transport: str = 'grpc', request_type=cloudtasks.CreateQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue( - name='name_value', - state=gct_queue.Queue.State.RUNNING, - app_engine_http_target=target.AppEngineHttpTarget(app_engine_routing_override=target.AppEngineRouting(service='service_value')), - ) - response = client.create_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gct_queue.Queue) - assert response.name == 'name_value' - assert response.state == gct_queue.Queue.State.RUNNING - - -def test_create_queue_from_dict(): - test_create_queue(request_type=dict) - - -def test_create_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - client.create_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateQueueRequest() - - -@pytest.mark.asyncio -async def test_create_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CreateQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue( - name='name_value', - state=gct_queue.Queue.State.RUNNING, - )) - response = await client.create_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateQueueRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gct_queue.Queue) - assert response.name == 'name_value' - assert response.state == gct_queue.Queue.State.RUNNING - - -@pytest.mark.asyncio -async def test_create_queue_async_from_dict(): - await test_create_queue_async(request_type=dict) - - -def test_create_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.CreateQueueRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - call.return_value = gct_queue.Queue() - client.create_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.CreateQueueRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) - await client.create_queue(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_queue( - parent='parent_value', - queue=gct_queue.Queue(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].queue == gct_queue.Queue(name='name_value') - - -def test_create_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_queue( - cloudtasks.CreateQueueRequest(), - parent='parent_value', - queue=gct_queue.Queue(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = gct_queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_queue( - parent='parent_value', - queue=gct_queue.Queue(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].queue == gct_queue.Queue(name='name_value') - - -@pytest.mark.asyncio -async def test_create_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_queue( - cloudtasks.CreateQueueRequest(), - parent='parent_value', - queue=gct_queue.Queue(name='name_value'), - ) - - -def test_update_queue(transport: str = 'grpc', request_type=cloudtasks.UpdateQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue( - name='name_value', - state=gct_queue.Queue.State.RUNNING, - app_engine_http_target=target.AppEngineHttpTarget(app_engine_routing_override=target.AppEngineRouting(service='service_value')), - ) - response = client.update_queue(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.UpdateQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gct_queue.Queue) - assert response.name == 'name_value' - assert response.state == gct_queue.Queue.State.RUNNING - - -def test_update_queue_from_dict(): - test_update_queue(request_type=dict) - - -def test_update_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - client.update_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.UpdateQueueRequest() - - -@pytest.mark.asyncio -async def test_update_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.UpdateQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue( - name='name_value', - state=gct_queue.Queue.State.RUNNING, - )) - response = await client.update_queue(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.UpdateQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gct_queue.Queue) - assert response.name == 'name_value' - assert response.state == gct_queue.Queue.State.RUNNING - - -@pytest.mark.asyncio -async def test_update_queue_async_from_dict(): - await test_update_queue_async(request_type=dict) - - -def test_update_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.UpdateQueueRequest() - - request.queue.name = 'queue.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - call.return_value = gct_queue.Queue() - client.update_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'queue.name=queue.name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.UpdateQueueRequest() - - request.queue.name = 'queue.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) - await client.update_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'queue.name=queue.name/value', - ) in kw['metadata'] - - -def test_update_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_queue( - queue=gct_queue.Queue(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].queue == gct_queue.Queue(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -def test_update_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_queue( - cloudtasks.UpdateQueueRequest(), - queue=gct_queue.Queue(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.asyncio -async def test_update_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_queue( - queue=gct_queue.Queue(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].queue == gct_queue.Queue(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -@pytest.mark.asyncio -async def test_update_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_queue( - cloudtasks.UpdateQueueRequest(), - queue=gct_queue.Queue(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_queue(transport: str = 'grpc', request_type=cloudtasks.DeleteQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteQueueRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_queue_from_dict(): - test_delete_queue(request_type=dict) - - -def test_delete_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - client.delete_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteQueueRequest() - - -@pytest.mark.asyncio -async def test_delete_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.DeleteQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteQueueRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_queue_async_from_dict(): - await test_delete_queue_async(request_type=dict) - - -def test_delete_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.DeleteQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - call.return_value = None - client.delete_queue(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.DeleteQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_queue( - cloudtasks.DeleteQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_queue( - cloudtasks.DeleteQueueRequest(), - name='name_value', - ) - - -def test_purge_queue(transport: str = 'grpc', request_type=cloudtasks.PurgeQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - app_engine_http_target=target.AppEngineHttpTarget(app_engine_routing_override=target.AppEngineRouting(service='service_value')), - ) - response = client.purge_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PurgeQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -def test_purge_queue_from_dict(): - test_purge_queue(request_type=dict) - - -def test_purge_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - client.purge_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PurgeQueueRequest() - - -@pytest.mark.asyncio -async def test_purge_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.PurgeQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - )) - response = await client.purge_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PurgeQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -@pytest.mark.asyncio -async def test_purge_queue_async_from_dict(): - await test_purge_queue_async(request_type=dict) - - -def test_purge_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.PurgeQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - call.return_value = queue.Queue() - client.purge_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_purge_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.PurgeQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - await client.purge_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_purge_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.purge_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_purge_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.purge_queue( - cloudtasks.PurgeQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_purge_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.purge_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_purge_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.purge_queue( - cloudtasks.PurgeQueueRequest(), - name='name_value', - ) - - -def test_pause_queue(transport: str = 'grpc', request_type=cloudtasks.PauseQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - app_engine_http_target=target.AppEngineHttpTarget(app_engine_routing_override=target.AppEngineRouting(service='service_value')), - ) - response = client.pause_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PauseQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -def test_pause_queue_from_dict(): - test_pause_queue(request_type=dict) - - -def test_pause_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - client.pause_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PauseQueueRequest() - - -@pytest.mark.asyncio -async def test_pause_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.PauseQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - )) - response = await client.pause_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PauseQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -@pytest.mark.asyncio -async def test_pause_queue_async_from_dict(): - await test_pause_queue_async(request_type=dict) - - -def test_pause_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.PauseQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - call.return_value = queue.Queue() - client.pause_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_pause_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.PauseQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - await client.pause_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_pause_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.pause_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_pause_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.pause_queue( - cloudtasks.PauseQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_pause_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.pause_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_pause_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.pause_queue( - cloudtasks.PauseQueueRequest(), - name='name_value', - ) - - -def test_resume_queue(transport: str = 'grpc', request_type=cloudtasks.ResumeQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - app_engine_http_target=target.AppEngineHttpTarget(app_engine_routing_override=target.AppEngineRouting(service='service_value')), - ) - response = client.resume_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ResumeQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -def test_resume_queue_from_dict(): - test_resume_queue(request_type=dict) - - -def test_resume_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - client.resume_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ResumeQueueRequest() - - -@pytest.mark.asyncio -async def test_resume_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ResumeQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - )) - response = await client.resume_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ResumeQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - - -@pytest.mark.asyncio -async def test_resume_queue_async_from_dict(): - await test_resume_queue_async(request_type=dict) - - -def test_resume_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ResumeQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - call.return_value = queue.Queue() - client.resume_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_resume_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ResumeQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - await client.resume_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_resume_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.resume_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_resume_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.resume_queue( - cloudtasks.ResumeQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_resume_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.resume_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_resume_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.resume_queue( - cloudtasks.ResumeQueueRequest(), - name='name_value', - ) - - -def test_get_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_from_dict(): - test_get_iam_policy(request_type=dict) - - -def test_get_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) - - -def test_get_iam_policy_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -def test_get_iam_policy_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_set_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_from_dict(): - test_set_iam_policy(request_type=dict) - - -def test_set_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - - -def test_set_iam_policy_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -def test_set_iam_policy_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_test_iam_permissions(transport: str = 'grpc', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_from_dict(): - test_test_iam_permissions(request_type=dict) - - -def test_test_iam_permissions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) - - -def test_test_iam_permissions_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_test_iam_permissions_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - assert args[0].permissions == ['permissions_value'] - - -def test_test_iam_permissions_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -@pytest.mark.asyncio -async def test_test_iam_permissions_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - assert args[0].permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -def test_list_tasks(transport: str = 'grpc', request_type=cloudtasks.ListTasksRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListTasksResponse( - next_page_token='next_page_token_value', - ) - response = client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListTasksRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTasksPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_tasks_from_dict(): - test_list_tasks(request_type=dict) - - -def test_list_tasks_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - client.list_tasks() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListTasksRequest() - - -@pytest.mark.asyncio -async def test_list_tasks_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ListTasksRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListTasksRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTasksAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_tasks_async_from_dict(): - await test_list_tasks_async(request_type=dict) - - -def test_list_tasks_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ListTasksRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = cloudtasks.ListTasksResponse() - client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_tasks_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ListTasksRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse()) - await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_tasks_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListTasksResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_tasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_tasks_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_tasks( - cloudtasks.ListTasksRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_tasks_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListTasksResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_tasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_tasks_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_tasks( - cloudtasks.ListTasksRequest(), - parent='parent_value', - ) - - -def test_list_tasks_pager(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - cloudtasks.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_tasks(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, task.Task) - for i in results) - -def test_list_tasks_pages(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - cloudtasks.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tasks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_tasks_async_pager(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - cloudtasks.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tasks(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, task.Task) - for i in responses) - -@pytest.mark.asyncio -async def test_list_tasks_async_pages(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - cloudtasks.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_tasks(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_task(transport: str = 'grpc', request_type=cloudtasks.GetTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task( - name='name_value', - view=task.Task.View.BASIC, - app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), - ) - response = client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetTaskRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.view == task.Task.View.BASIC - - -def test_get_task_from_dict(): - test_get_task(request_type=dict) - - -def test_get_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - client.get_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetTaskRequest() - - -@pytest.mark.asyncio -async def test_get_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.GetTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( - name='name_value', - view=task.Task.View.BASIC, - )) - response = await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetTaskRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.view == task.Task.View.BASIC - - -@pytest.mark.asyncio -async def test_get_task_async_from_dict(): - await test_get_task_async(request_type=dict) - - -def test_get_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.GetTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = task.Task() - client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.GetTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_task( - cloudtasks.GetTaskRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_task( - cloudtasks.GetTaskRequest(), - name='name_value', - ) - - -def test_create_task(transport: str = 'grpc', request_type=cloudtasks.CreateTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_task.Task( - name='name_value', - view=gct_task.Task.View.BASIC, - app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), - ) - response = client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateTaskRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gct_task.Task) - assert response.name == 'name_value' - assert response.view == gct_task.Task.View.BASIC - - -def test_create_task_from_dict(): - test_create_task(request_type=dict) - - -def test_create_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - client.create_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateTaskRequest() - - -@pytest.mark.asyncio -async def test_create_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CreateTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task( - name='name_value', - view=gct_task.Task.View.BASIC, - )) - response = await client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateTaskRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gct_task.Task) - assert response.name == 'name_value' - assert response.view == gct_task.Task.View.BASIC - - -@pytest.mark.asyncio -async def test_create_task_async_from_dict(): - await test_create_task_async(request_type=dict) - - -def test_create_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudtasks.CreateTaskRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value = gct_task.Task() - client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.CreateTaskRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) - await client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = gct_task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_task( - parent='parent_value', - task=gct_task.Task(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].task == gct_task.Task(name='name_value') - - -def test_create_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_task( - cloudtasks.CreateTaskRequest(), - parent='parent_value', - task=gct_task.Task(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_task.Task() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_task( - parent='parent_value', - task=gct_task.Task(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].task == gct_task.Task(name='name_value') - - -@pytest.mark.asyncio -async def test_create_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_task( - cloudtasks.CreateTaskRequest(), - parent='parent_value', - task=gct_task.Task(name='name_value'), - ) - - -def test_delete_task(transport: str = 'grpc', request_type=cloudtasks.DeleteTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteTaskRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_task_from_dict(): - test_delete_task(request_type=dict) - - -def test_delete_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - client.delete_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteTaskRequest() - - -@pytest.mark.asyncio -async def test_delete_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.DeleteTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteTaskRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_task_async_from_dict(): - await test_delete_task_async(request_type=dict) - - -def test_delete_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.DeleteTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value = None - client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.DeleteTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_task( - cloudtasks.DeleteTaskRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_task( - cloudtasks.DeleteTaskRequest(), - name='name_value', - ) - - -def test_lease_tasks(transport: str = 'grpc', request_type=cloudtasks.LeaseTasksRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lease_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.LeaseTasksResponse( - ) - response = client.lease_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.LeaseTasksRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudtasks.LeaseTasksResponse) - - -def test_lease_tasks_from_dict(): - test_lease_tasks(request_type=dict) - - -def test_lease_tasks_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.lease_tasks), - '__call__') as call: - client.lease_tasks() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.LeaseTasksRequest() - - -@pytest.mark.asyncio -async def test_lease_tasks_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.LeaseTasksRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lease_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.LeaseTasksResponse( - )) - response = await client.lease_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.LeaseTasksRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, cloudtasks.LeaseTasksResponse) - - -@pytest.mark.asyncio -async def test_lease_tasks_async_from_dict(): - await test_lease_tasks_async(request_type=dict) - - -def test_lease_tasks_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.LeaseTasksRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.lease_tasks), - '__call__') as call: - call.return_value = cloudtasks.LeaseTasksResponse() - client.lease_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_lease_tasks_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.LeaseTasksRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lease_tasks), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.LeaseTasksResponse()) - await client.lease_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_lease_tasks_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lease_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.LeaseTasksResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.lease_tasks( - parent='parent_value', - lease_duration=duration_pb2.Duration(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration(seconds=751) - - -def test_lease_tasks_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.lease_tasks( - cloudtasks.LeaseTasksRequest(), - parent='parent_value', - lease_duration=duration_pb2.Duration(seconds=751), - ) - - -@pytest.mark.asyncio -async def test_lease_tasks_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lease_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.LeaseTasksResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.LeaseTasksResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.lease_tasks( - parent='parent_value', - lease_duration=duration_pb2.Duration(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration(seconds=751) - - -@pytest.mark.asyncio -async def test_lease_tasks_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.lease_tasks( - cloudtasks.LeaseTasksRequest(), - parent='parent_value', - lease_duration=duration_pb2.Duration(seconds=751), - ) - - -def test_acknowledge_task(transport: str = 'grpc', request_type=cloudtasks.AcknowledgeTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.acknowledge_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.AcknowledgeTaskRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_acknowledge_task_from_dict(): - test_acknowledge_task(request_type=dict) - - -def test_acknowledge_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge_task), - '__call__') as call: - client.acknowledge_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.AcknowledgeTaskRequest() - - -@pytest.mark.asyncio -async def test_acknowledge_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.AcknowledgeTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.acknowledge_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.AcknowledgeTaskRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_acknowledge_task_async_from_dict(): - await test_acknowledge_task_async(request_type=dict) - - -def test_acknowledge_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.AcknowledgeTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.acknowledge_task), - '__call__') as call: - call.return_value = None - client.acknowledge_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_acknowledge_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.AcknowledgeTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.acknowledge_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_acknowledge_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.acknowledge_task( - name='name_value', - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) - - -def test_acknowledge_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.acknowledge_task( - cloudtasks.AcknowledgeTaskRequest(), - name='name_value', - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -@pytest.mark.asyncio -async def test_acknowledge_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.acknowledge_task( - name='name_value', - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) - - -@pytest.mark.asyncio -async def test_acknowledge_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.acknowledge_task( - cloudtasks.AcknowledgeTaskRequest(), - name='name_value', - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -def test_renew_lease(transport: str = 'grpc', request_type=cloudtasks.RenewLeaseRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.renew_lease), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task( - name='name_value', - view=task.Task.View.BASIC, - app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), - ) - response = client.renew_lease(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.RenewLeaseRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.view == task.Task.View.BASIC - - -def test_renew_lease_from_dict(): - test_renew_lease(request_type=dict) - - -def test_renew_lease_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.renew_lease), - '__call__') as call: - client.renew_lease() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.RenewLeaseRequest() - - -@pytest.mark.asyncio -async def test_renew_lease_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.RenewLeaseRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.renew_lease), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( - name='name_value', - view=task.Task.View.BASIC, - )) - response = await client.renew_lease(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.RenewLeaseRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.view == task.Task.View.BASIC - - -@pytest.mark.asyncio -async def test_renew_lease_async_from_dict(): - await test_renew_lease_async(request_type=dict) - - -def test_renew_lease_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.RenewLeaseRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.renew_lease), - '__call__') as call: - call.return_value = task.Task() - client.renew_lease(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_renew_lease_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.RenewLeaseRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.renew_lease), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - await client.renew_lease(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_renew_lease_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.renew_lease), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.renew_lease( - name='name_value', - schedule_time=timestamp_pb2.Timestamp(seconds=751), - lease_duration=duration_pb2.Duration(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) - assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration(seconds=751) - - -def test_renew_lease_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.renew_lease( - cloudtasks.RenewLeaseRequest(), - name='name_value', - schedule_time=timestamp_pb2.Timestamp(seconds=751), - lease_duration=duration_pb2.Duration(seconds=751), - ) - - -@pytest.mark.asyncio -async def test_renew_lease_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.renew_lease), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = task.Task() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.renew_lease( - name='name_value', - schedule_time=timestamp_pb2.Timestamp(seconds=751), - lease_duration=duration_pb2.Duration(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) - assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration(seconds=751) - - -@pytest.mark.asyncio -async def test_renew_lease_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.renew_lease( - cloudtasks.RenewLeaseRequest(), - name='name_value', - schedule_time=timestamp_pb2.Timestamp(seconds=751), - lease_duration=duration_pb2.Duration(seconds=751), - ) - - -def test_cancel_lease(transport: str = 'grpc', request_type=cloudtasks.CancelLeaseRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_lease), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = task.Task( - name='name_value', - view=task.Task.View.BASIC, - app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), - ) - response = client.cancel_lease(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CancelLeaseRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.view == task.Task.View.BASIC - - -def test_cancel_lease_from_dict(): - test_cancel_lease(request_type=dict) - - -def test_cancel_lease_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_lease), - '__call__') as call: - client.cancel_lease() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CancelLeaseRequest() - - -@pytest.mark.asyncio -async def test_cancel_lease_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CancelLeaseRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_lease), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( - name='name_value', - view=task.Task.View.BASIC, - )) - response = await client.cancel_lease(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CancelLeaseRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.view == task.Task.View.BASIC - - -@pytest.mark.asyncio -async def test_cancel_lease_async_from_dict(): - await test_cancel_lease_async(request_type=dict) - - -def test_cancel_lease_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.CancelLeaseRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_lease), - '__call__') as call: - call.return_value = task.Task() - client.cancel_lease(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_cancel_lease_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.CancelLeaseRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.cancel_lease), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - await client.cancel_lease(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_cancel_lease_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_lease), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.cancel_lease( - name='name_value', - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) - - -def test_cancel_lease_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.cancel_lease( - cloudtasks.CancelLeaseRequest(), - name='name_value', - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -@pytest.mark.asyncio -async def test_cancel_lease_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_lease), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.cancel_lease( - name='name_value', - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) - - -@pytest.mark.asyncio -async def test_cancel_lease_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.cancel_lease( - cloudtasks.CancelLeaseRequest(), - name='name_value', - schedule_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -def test_run_task(transport: str = 'grpc', request_type=cloudtasks.RunTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task( - name='name_value', - view=task.Task.View.BASIC, - app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), - ) - response = client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.RunTaskRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.view == task.Task.View.BASIC - - -def test_run_task_from_dict(): - test_run_task(request_type=dict) - - -def test_run_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - client.run_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.RunTaskRequest() - - -@pytest.mark.asyncio -async def test_run_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.RunTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( - name='name_value', - view=task.Task.View.BASIC, - )) - response = await client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.RunTaskRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.view == task.Task.View.BASIC - - -@pytest.mark.asyncio -async def test_run_task_async_from_dict(): - await test_run_task_async(request_type=dict) - - -def test_run_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.RunTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - call.return_value = task.Task() - client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_run_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudtasks.RunTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - await client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_run_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.run_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_run_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.run_task( - cloudtasks.RunTaskRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_run_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.run_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_run_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.run_task( - cloudtasks.RunTaskRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudTasksClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudTasksClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CloudTasksClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.CloudTasksGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.CloudTasksGrpcTransport, - transports.CloudTasksGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CloudTasksGrpcTransport, - ) - -def test_cloud_tasks_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudTasksTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_cloud_tasks_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.CloudTasksTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_queues', - 'get_queue', - 'create_queue', - 'update_queue', - 'delete_queue', - 'purge_queue', - 'pause_queue', - 'resume_queue', - 'get_iam_policy', - 'set_iam_policy', - 'test_iam_permissions', - 'list_tasks', - 'get_task', - 'create_task', - 'delete_task', - 'lease_tasks', - 'acknowledge_task', - 'renew_lease', - 'cancel_lease', - 'run_task', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_cloud_tasks_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudTasksTransport( - credentials_file="credentials.json", - 
quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_cloud_tasks_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudTasksTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - quota_project_id="octopus", - ) - - -def test_cloud_tasks_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudTasksTransport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_cloud_tasks_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudTasksClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_cloud_tasks_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudTasksClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudTasksGrpcTransport, - transports.CloudTasksGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_cloud_tasks_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudTasksGrpcTransport, - transports.CloudTasksGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_cloud_tasks_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudTasksGrpcTransport, grpc_helpers), - (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "cloudtasks.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="cloudtasks.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) -def test_cloud_tasks_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_cloud_tasks_host_no_port(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudtasks.googleapis.com'), - ) - assert client.transport._host == 'cloudtasks.googleapis.com:443' - - -def test_cloud_tasks_host_with_port(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudtasks.googleapis.com:8000'), - ) - assert client.transport._host == 'cloudtasks.googleapis.com:8000' - -def test_cloud_tasks_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.CloudTasksGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_cloud_tasks_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudTasksGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) -def test_cloud_tasks_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - 
credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) -def test_cloud_tasks_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_queue_path(): - project = "squid" - location = "clam" - queue = "whelk" - expected = "projects/{project}/locations/{location}/queues/{queue}".format(project=project, location=location, queue=queue, ) - actual = 
CloudTasksClient.queue_path(project, location, queue) - assert expected == actual - - -def test_parse_queue_path(): - expected = { - "project": "octopus", - "location": "oyster", - "queue": "nudibranch", - } - path = CloudTasksClient.queue_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_queue_path(path) - assert expected == actual - -def test_task_path(): - project = "cuttlefish" - location = "mussel" - queue = "winkle" - task = "nautilus" - expected = "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(project=project, location=location, queue=queue, task=task, ) - actual = CloudTasksClient.task_path(project, location, queue, task) - assert expected == actual - - -def test_parse_task_path(): - expected = { - "project": "scallop", - "location": "abalone", - "queue": "squid", - "task": "clam", - } - path = CloudTasksClient.task_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_task_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = CloudTasksClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = CloudTasksClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudTasksClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = CloudTasksClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = CloudTasksClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = CloudTasksClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = CloudTasksClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = CloudTasksClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = CloudTasksClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudTasksClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = CloudTasksClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = CloudTasksClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.CloudTasksTransport, '_prep_wrapped_messages') as prep: - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.CloudTasksTransport, '_prep_wrapped_messages') as prep: - transport_class = CloudTasksClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v2beta3/.coveragerc b/owl-bot-staging/v2beta3/.coveragerc deleted file mode 100644 index 1d5bc53f..00000000 --- a/owl-bot-staging/v2beta3/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/tasks/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v2beta3/MANIFEST.in b/owl-bot-staging/v2beta3/MANIFEST.in deleted file mode 100644 index 249d527f..00000000 --- a/owl-bot-staging/v2beta3/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/tasks *.py -recursive-include google/cloud/tasks_v2beta3 *.py diff --git a/owl-bot-staging/v2beta3/README.rst b/owl-bot-staging/v2beta3/README.rst deleted file mode 100644 index 6171a7e2..00000000 --- a/owl-bot-staging/v2beta3/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Tasks API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Tasks API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2beta3/docs/conf.py b/owl-bot-staging/v2beta3/docs/conf.py deleted file mode 100644 index 62c563cc..00000000 --- a/owl-bot-staging/v2beta3/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-tasks documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-tasks" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
-# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-tasks-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-tasks.tex", - u"google-cloud-tasks Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-tasks", - u"Google Cloud Tasks Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-tasks", - u"google-cloud-tasks Documentation", - author, - "google-cloud-tasks", - "GAPIC library for Google Cloud Tasks API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. 
-# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v2beta3/docs/index.rst b/owl-bot-staging/v2beta3/docs/index.rst deleted file mode 100644 index 396b0b20..00000000 --- a/owl-bot-staging/v2beta3/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - tasks_v2beta3/services - tasks_v2beta3/types diff --git a/owl-bot-staging/v2beta3/docs/tasks_v2beta3/cloud_tasks.rst b/owl-bot-staging/v2beta3/docs/tasks_v2beta3/cloud_tasks.rst deleted file mode 100644 index ef422e09..00000000 --- a/owl-bot-staging/v2beta3/docs/tasks_v2beta3/cloud_tasks.rst +++ /dev/null @@ -1,10 +0,0 @@ -CloudTasks ----------------------------- - -.. automodule:: google.cloud.tasks_v2beta3.services.cloud_tasks - :members: - :inherited-members: - -.. 
automodule:: google.cloud.tasks_v2beta3.services.cloud_tasks.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v2beta3/docs/tasks_v2beta3/services.rst b/owl-bot-staging/v2beta3/docs/tasks_v2beta3/services.rst deleted file mode 100644 index bd97721b..00000000 --- a/owl-bot-staging/v2beta3/docs/tasks_v2beta3/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Tasks v2beta3 API -=========================================== -.. toctree:: - :maxdepth: 2 - - cloud_tasks diff --git a/owl-bot-staging/v2beta3/docs/tasks_v2beta3/types.rst b/owl-bot-staging/v2beta3/docs/tasks_v2beta3/types.rst deleted file mode 100644 index 0f3455d1..00000000 --- a/owl-bot-staging/v2beta3/docs/tasks_v2beta3/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Tasks v2beta3 API -======================================== - -.. automodule:: google.cloud.tasks_v2beta3.types - :members: - :undoc-members: - :show-inheritance: diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks/__init__.py b/owl-bot-staging/v2beta3/google/cloud/tasks/__init__.py deleted file mode 100644 index 5af667c4..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks/__init__.py +++ /dev/null @@ -1,83 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.cloud.tasks_v2beta3.services.cloud_tasks.client import CloudTasksClient -from google.cloud.tasks_v2beta3.services.cloud_tasks.async_client import CloudTasksAsyncClient - -from google.cloud.tasks_v2beta3.types.cloudtasks import CreateQueueRequest -from google.cloud.tasks_v2beta3.types.cloudtasks import CreateTaskRequest -from google.cloud.tasks_v2beta3.types.cloudtasks import DeleteQueueRequest -from google.cloud.tasks_v2beta3.types.cloudtasks import DeleteTaskRequest -from google.cloud.tasks_v2beta3.types.cloudtasks import GetQueueRequest -from google.cloud.tasks_v2beta3.types.cloudtasks import GetTaskRequest -from google.cloud.tasks_v2beta3.types.cloudtasks import ListQueuesRequest -from google.cloud.tasks_v2beta3.types.cloudtasks import ListQueuesResponse -from google.cloud.tasks_v2beta3.types.cloudtasks import ListTasksRequest -from google.cloud.tasks_v2beta3.types.cloudtasks import ListTasksResponse -from google.cloud.tasks_v2beta3.types.cloudtasks import PauseQueueRequest -from google.cloud.tasks_v2beta3.types.cloudtasks import PurgeQueueRequest -from google.cloud.tasks_v2beta3.types.cloudtasks import ResumeQueueRequest -from google.cloud.tasks_v2beta3.types.cloudtasks import RunTaskRequest -from google.cloud.tasks_v2beta3.types.cloudtasks import UpdateQueueRequest -from google.cloud.tasks_v2beta3.types.queue import Queue -from google.cloud.tasks_v2beta3.types.queue import QueueStats -from google.cloud.tasks_v2beta3.types.queue import RateLimits -from google.cloud.tasks_v2beta3.types.queue import RetryConfig -from google.cloud.tasks_v2beta3.types.queue import StackdriverLoggingConfig -from google.cloud.tasks_v2beta3.types.target import AppEngineHttpQueue -from google.cloud.tasks_v2beta3.types.target import AppEngineHttpRequest -from google.cloud.tasks_v2beta3.types.target import AppEngineRouting -from google.cloud.tasks_v2beta3.types.target import HttpRequest -from google.cloud.tasks_v2beta3.types.target import OAuthToken -from 
google.cloud.tasks_v2beta3.types.target import OidcToken -from google.cloud.tasks_v2beta3.types.target import PullMessage -from google.cloud.tasks_v2beta3.types.target import HttpMethod -from google.cloud.tasks_v2beta3.types.task import Attempt -from google.cloud.tasks_v2beta3.types.task import Task - -__all__ = ('CloudTasksClient', - 'CloudTasksAsyncClient', - 'CreateQueueRequest', - 'CreateTaskRequest', - 'DeleteQueueRequest', - 'DeleteTaskRequest', - 'GetQueueRequest', - 'GetTaskRequest', - 'ListQueuesRequest', - 'ListQueuesResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'PauseQueueRequest', - 'PurgeQueueRequest', - 'ResumeQueueRequest', - 'RunTaskRequest', - 'UpdateQueueRequest', - 'Queue', - 'QueueStats', - 'RateLimits', - 'RetryConfig', - 'StackdriverLoggingConfig', - 'AppEngineHttpQueue', - 'AppEngineHttpRequest', - 'AppEngineRouting', - 'HttpRequest', - 'OAuthToken', - 'OidcToken', - 'PullMessage', - 'HttpMethod', - 'Attempt', - 'Task', -) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks/py.typed b/owl-bot-staging/v2beta3/google/cloud/tasks/py.typed deleted file mode 100644 index 41f0b1b8..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-tasks package uses inline types. diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/__init__.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/__init__.py deleted file mode 100644 index 52588da3..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/__init__.py +++ /dev/null @@ -1,84 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from .services.cloud_tasks import CloudTasksClient -from .services.cloud_tasks import CloudTasksAsyncClient - -from .types.cloudtasks import CreateQueueRequest -from .types.cloudtasks import CreateTaskRequest -from .types.cloudtasks import DeleteQueueRequest -from .types.cloudtasks import DeleteTaskRequest -from .types.cloudtasks import GetQueueRequest -from .types.cloudtasks import GetTaskRequest -from .types.cloudtasks import ListQueuesRequest -from .types.cloudtasks import ListQueuesResponse -from .types.cloudtasks import ListTasksRequest -from .types.cloudtasks import ListTasksResponse -from .types.cloudtasks import PauseQueueRequest -from .types.cloudtasks import PurgeQueueRequest -from .types.cloudtasks import ResumeQueueRequest -from .types.cloudtasks import RunTaskRequest -from .types.cloudtasks import UpdateQueueRequest -from .types.queue import Queue -from .types.queue import QueueStats -from .types.queue import RateLimits -from .types.queue import RetryConfig -from .types.queue import StackdriverLoggingConfig -from .types.target import AppEngineHttpQueue -from .types.target import AppEngineHttpRequest -from .types.target import AppEngineRouting -from .types.target import HttpRequest -from .types.target import OAuthToken -from .types.target import OidcToken -from .types.target import PullMessage -from .types.target import HttpMethod -from .types.task import Attempt -from .types.task import Task - -__all__ = ( - 'CloudTasksAsyncClient', -'AppEngineHttpQueue', -'AppEngineHttpRequest', -'AppEngineRouting', -'Attempt', -'CloudTasksClient', 
-'CreateQueueRequest', -'CreateTaskRequest', -'DeleteQueueRequest', -'DeleteTaskRequest', -'GetQueueRequest', -'GetTaskRequest', -'HttpMethod', -'HttpRequest', -'ListQueuesRequest', -'ListQueuesResponse', -'ListTasksRequest', -'ListTasksResponse', -'OAuthToken', -'OidcToken', -'PauseQueueRequest', -'PullMessage', -'PurgeQueueRequest', -'Queue', -'QueueStats', -'RateLimits', -'ResumeQueueRequest', -'RetryConfig', -'RunTaskRequest', -'StackdriverLoggingConfig', -'Task', -'UpdateQueueRequest', -) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/gapic_metadata.json b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/gapic_metadata.json deleted file mode 100644 index 1f8cc2e7..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/gapic_metadata.json +++ /dev/null @@ -1,183 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.tasks_v2beta3", - "protoPackage": "google.cloud.tasks.v2beta3", - "schema": "1.0", - "services": { - "CloudTasks": { - "clients": { - "grpc": { - "libraryClient": "CloudTasksClient", - "rpcs": { - "CreateQueue": { - "methods": [ - "create_queue" - ] - }, - "CreateTask": { - "methods": [ - "create_task" - ] - }, - "DeleteQueue": { - "methods": [ - "delete_queue" - ] - }, - "DeleteTask": { - "methods": [ - "delete_task" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetQueue": { - "methods": [ - "get_queue" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "ListQueues": { - "methods": [ - "list_queues" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "PauseQueue": { - "methods": [ - "pause_queue" - ] - }, - "PurgeQueue": { - "methods": [ - "purge_queue" - ] - }, - "ResumeQueue": { - "methods": [ - "resume_queue" - ] - }, - "RunTask": { - "methods": [ - "run_task" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - 
"TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateQueue": { - "methods": [ - "update_queue" - ] - } - } - }, - "grpc-async": { - "libraryClient": "CloudTasksAsyncClient", - "rpcs": { - "CreateQueue": { - "methods": [ - "create_queue" - ] - }, - "CreateTask": { - "methods": [ - "create_task" - ] - }, - "DeleteQueue": { - "methods": [ - "delete_queue" - ] - }, - "DeleteTask": { - "methods": [ - "delete_task" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetQueue": { - "methods": [ - "get_queue" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "ListQueues": { - "methods": [ - "list_queues" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "PauseQueue": { - "methods": [ - "pause_queue" - ] - }, - "PurgeQueue": { - "methods": [ - "purge_queue" - ] - }, - "ResumeQueue": { - "methods": [ - "resume_queue" - ] - }, - "RunTask": { - "methods": [ - "run_task" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateQueue": { - "methods": [ - "update_queue" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/py.typed b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/py.typed deleted file mode 100644 index 41f0b1b8..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-tasks package uses inline types. 
diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/__init__.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/__init__.py deleted file mode 100644 index 4de65971..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py deleted file mode 100644 index 1478acb5..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import CloudTasksClient -from .async_client import CloudTasksAsyncClient - -__all__ = ( - 'CloudTasksClient', - 'CloudTasksAsyncClient', -) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py deleted file mode 100644 index 658f63c0..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py +++ /dev/null @@ -1,1803 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers -from google.cloud.tasks_v2beta3.types import cloudtasks -from google.cloud.tasks_v2beta3.types import queue -from google.cloud.tasks_v2beta3.types import queue as gct_queue -from google.cloud.tasks_v2beta3.types import target -from google.cloud.tasks_v2beta3.types import task -from google.cloud.tasks_v2beta3.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport -from .client import CloudTasksClient - - -class CloudTasksAsyncClient: - """Cloud Tasks allows developers to manage the execution of - background work in their applications. 
- """ - - _client: CloudTasksClient - - DEFAULT_ENDPOINT = CloudTasksClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = CloudTasksClient.DEFAULT_MTLS_ENDPOINT - - queue_path = staticmethod(CloudTasksClient.queue_path) - parse_queue_path = staticmethod(CloudTasksClient.parse_queue_path) - task_path = staticmethod(CloudTasksClient.task_path) - parse_task_path = staticmethod(CloudTasksClient.parse_task_path) - common_billing_account_path = staticmethod(CloudTasksClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(CloudTasksClient.parse_common_billing_account_path) - common_folder_path = staticmethod(CloudTasksClient.common_folder_path) - parse_common_folder_path = staticmethod(CloudTasksClient.parse_common_folder_path) - common_organization_path = staticmethod(CloudTasksClient.common_organization_path) - parse_common_organization_path = staticmethod(CloudTasksClient.parse_common_organization_path) - common_project_path = staticmethod(CloudTasksClient.common_project_path) - parse_common_project_path = staticmethod(CloudTasksClient.parse_common_project_path) - common_location_path = staticmethod(CloudTasksClient.common_location_path) - parse_common_location_path = staticmethod(CloudTasksClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksAsyncClient: The constructed client. - """ - return CloudTasksClient.from_service_account_info.__func__(CloudTasksAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. 
- - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksAsyncClient: The constructed client. - """ - return CloudTasksClient.from_service_account_file.__func__(CloudTasksAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CloudTasksTransport: - """Returns the transport used by the client instance. - - Returns: - CloudTasksTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(CloudTasksClient).get_transport_class, type(CloudTasksClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, CloudTasksTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud tasks client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.CloudTasksTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = CloudTasksClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_queues(self, - request: cloudtasks.ListQueuesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListQueuesAsyncPager: - r"""Lists queues. - Queues are returned in lexicographical order. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.ListQueuesRequest`): - The request object. Request message for - [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. - parent (:class:`str`): - Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.services.cloud_tasks.pagers.ListQueuesAsyncPager: - Response message for - [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.ListQueuesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_queues, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListQueuesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_queue(self, - request: cloudtasks.GetQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Gets a queue. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.GetQueueRequest`): - The request object. Request message for - [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. - name (:class:`str`): - Required. The resource name of the queue. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.GetQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_queue, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_queue(self, - request: cloudtasks.CreateQueueRequest = None, - *, - parent: str = None, - queue: gct_queue.Queue = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_queue.Queue: - r"""Creates a queue. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.CreateQueueRequest`): - The request object. Request message for - [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. - parent (:class:`str`): - Required. The location name in which the queue will be - created. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - The list of allowed locations can be obtained by calling - Cloud Tasks' implementation of - [ListLocations][google.cloud.location.Locations.ListLocations]. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - queue (:class:`google.cloud.tasks_v2beta3.types.Queue`): - Required. The queue to create. - - [Queue's name][google.cloud.tasks.v2beta3.Queue.name] - cannot be the same as an existing queue. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, queue]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.CreateQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if queue is not None: - request.queue = queue - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_queue, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_queue(self, - request: cloudtasks.UpdateQueueRequest = None, - *, - queue: gct_queue.Queue = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_queue.Queue: - r"""Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.UpdateQueueRequest`): - The request object. Request message for - [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. - queue (:class:`google.cloud.tasks_v2beta3.types.Queue`): - Required. The queue to create or update. - - The queue's - [name][google.cloud.tasks.v2beta3.Queue.name] must be - specified. - - Output only fields cannot be modified using UpdateQueue. - Any value specified for an output only field will be - ignored. The queue's - [name][google.cloud.tasks.v2beta3.Queue.name] cannot be - changed. 
- - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - A mask used to specify which fields - of the queue are being updated. - If empty, then all fields will be - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([queue, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.UpdateQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if queue is not None: - request.queue = queue - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_queue, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("queue.name", request.queue.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_queue(self, - request: cloudtasks.DeleteQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be - created for 7 days. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.DeleteQueueRequest`): - The request object. Request message for - [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. - name (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.DeleteQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_queue, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def purge_queue(self, - request: cloudtasks.PurgeQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Purges a queue by deleting all of its tasks. - All tasks created before this method is called are - permanently deleted. - Purge operations can take up to one minute to take - effect. Tasks might be dispatched before the purge takes - effect. A purge is irreversible. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.PurgeQueueRequest`): - The request object. Request message for - [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. - name (:class:`str`): - Required. The queue name. 
For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.PurgeQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.purge_queue, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def pause_queue(self, - request: cloudtasks.PauseQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. - Tasks can still be added when the queue is paused. A queue is - paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.PauseQueueRequest`): - The request object. Request message for - [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. - name (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.PauseQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.pause_queue, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def resume_queue(self, - request: cloudtasks.ResumeQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The - state of a queue is stored in the queue's - [state][google.cloud.tasks.v2beta3.Queue.state]; after calling - this method it will be set to - [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can lead - to target overloading. If you are resuming high-QPS queues, - follow the 500/50/5 pattern described in `Managing Cloud Tasks - Scaling - Risks `__. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.ResumeQueueRequest`): - The request object. 
Request message for - [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. - name (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.ResumeQueueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.resume_queue, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: iam_policy_pb2.GetIamPolicyRequest = None, - *, - resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a - [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Args: - request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). 
- A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. - - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: iam_policy_pb2.SetIamPolicyRequest = None, - *, - resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy for a - [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM - permissions yet. Project-level permissions are required to use - the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Args: - request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being specified. 
See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). - A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. 
- - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, - *, - resource: str = None, - permissions: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that a caller has on a - [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does - not exist, this will return an empty set of permissions, not a - [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Args: - request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (:class:`Sequence[str]`): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. 
- - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_tasks(self, - request: cloudtasks.ListTasksRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTasksAsyncPager: - r"""Lists the tasks in a queue. - - By default, only the - [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is - retrieved due to performance considerations; - [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] - controls the subset of information which is returned. - - The tasks may be returned in any order. The ordering may change - at any time. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.ListTasksRequest`): - The request object. Request message for listing tasks - using - [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. - parent (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.services.cloud_tasks.pagers.ListTasksAsyncPager: - Response message for listing tasks using - [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.ListTasksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_tasks, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTasksAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_task(self, - request: cloudtasks.GetTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Gets a task. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.GetTaskRequest`): - The request object. Request message for getting a task - using - [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. - name (:class:`str`): - Required. The task name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.GetTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_task, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def create_task(self, - request: cloudtasks.CreateTaskRequest = None, - *, - parent: str = None, - task: gct_task.Task = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_task.Task: - r"""Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask - command. - - - The maximum task size is 100KB. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.CreateTaskRequest`): - The request object. Request message for - [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - parent (:class:`str`): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - The queue must already exist. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task (:class:`google.cloud.tasks_v2beta3.types.Task`): - Required. The task to add. - - Task names have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. - The user can optionally specify a task - [name][google.cloud.tasks.v2beta3.Task.name]. If a name - is not specified then the system will generate a random - unique task id, which will be set in the task returned - in the [response][google.cloud.tasks.v2beta3.Task.name]. - - If - [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] - is not set or is in the past then Cloud Tasks will set - it to the current time. - - Task De-duplication: - - Explicitly specifying a task ID enables task - de-duplication. If a task's ID is identical to that of - an existing task or a task that was deleted or executed - recently then the call will fail with - [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. 
If the - task's queue was created using Cloud Tasks, then another - task with the same name can't be created for ~1hour - after the original task was deleted or executed. If the - task's queue was created using queue.yaml or queue.xml, - then another task with the same name can't be created - for ~9days after the original task was deleted or - executed. - - Because there is an extra lookup cost to identify - duplicate task names, these - [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] - calls have significantly increased latency. Using hashed - strings for the task id or for the prefix of the task id - is recommended. Choosing task ids that are sequential or - have sequential prefixes, for example using a timestamp, - causes an increase in latency and error rates in all - task commands. The infrastructure relies on an - approximately uniform distribution of task ids to store - and serve tasks efficiently. - - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, task]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.CreateTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if task is not None: - request.task = task - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_task, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_task(self, - request: cloudtasks.DeleteTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a task. - A task can be deleted if it is scheduled or dispatched. - A task cannot be deleted if it has executed successfully - or permanently failed. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.DeleteTaskRequest`): - The request object. Request message for deleting a task - using - [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. - name (:class:`str`): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.DeleteTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_task, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def run_task(self, - request: cloudtasks.RunTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, - even if the task is already running, the queue has reached its - [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. 
For - example, - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be - used to retry a failed task after a fix has been made or to - manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is - returned contains the [status][Task.status] after the task is - dispatched but before the task is received by its target. - - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] - will be reset to the time that - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was - called plus the retry delay specified in the queue's - [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. - - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - - Args: - request (:class:`google.cloud.tasks_v2beta3.types.RunTaskRequest`): - The request object. Request message for forcing a task - to run now using - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. - name (:class:`str`): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = cloudtasks.RunTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_task, - default_timeout=20.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-tasks", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "CloudTasksAsyncClient", -) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py deleted file mode 100644 index 7ec21704..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py +++ /dev/null @@ -1,1942 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers -from google.cloud.tasks_v2beta3.types import cloudtasks -from google.cloud.tasks_v2beta3.types import queue -from google.cloud.tasks_v2beta3.types import queue as gct_queue -from google.cloud.tasks_v2beta3.types import target -from google.cloud.tasks_v2beta3.types import task -from google.cloud.tasks_v2beta3.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import 
CloudTasksGrpcTransport -from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport - - -class CloudTasksClientMeta(type): - """Metaclass for the CloudTasks client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] - _transport_registry["grpc"] = CloudTasksGrpcTransport - _transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[CloudTasksTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class CloudTasksClient(metaclass=CloudTasksClientMeta): - """Cloud Tasks allows developers to manage the execution of - background work in their applications. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "cloudtasks.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CloudTasksTransport: - """Returns the transport used by the client instance. - - Returns: - CloudTasksTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def queue_path(project: str,location: str,queue: str,) -> str: - """Returns a fully-qualified queue string.""" - return "projects/{project}/locations/{location}/queues/{queue}".format(project=project, location=location, queue=queue, ) - - @staticmethod - def parse_queue_path(path: str) -> Dict[str,str]: - """Parses a queue path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def task_path(project: str,location: str,queue: str,task: str,) -> str: - """Returns a fully-qualified task string.""" - return "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(project=project, location=location, queue=queue, task=task, ) - - @staticmethod - def parse_task_path(path: str) -> Dict[str,str]: - """Parses a task path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)/tasks/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - 
"""Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudTasksTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud tasks client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, CloudTasksTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
- client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, CloudTasksTransport): - # transport is a CloudTasksTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - ) - - def list_queues(self, - request: cloudtasks.ListQueuesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListQueuesPager: - r"""Lists queues. - Queues are returned in lexicographical order. - - Args: - request (google.cloud.tasks_v2beta3.types.ListQueuesRequest): - The request object. Request message for - [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. - parent (str): - Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.services.cloud_tasks.pagers.ListQueuesPager: - Response message for - [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.ListQueuesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.ListQueuesRequest): - request = cloudtasks.ListQueuesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_queues] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListQueuesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_queue(self, - request: cloudtasks.GetQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Gets a queue. - - Args: - request (google.cloud.tasks_v2beta3.types.GetQueueRequest): - The request object. Request message for - [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. - name (str): - Required. The resource name of the queue. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.GetQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.GetQueueRequest): - request = cloudtasks.GetQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_queue(self, - request: cloudtasks.CreateQueueRequest = None, - *, - parent: str = None, - queue: gct_queue.Queue = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_queue.Queue: - r"""Creates a queue. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (google.cloud.tasks_v2beta3.types.CreateQueueRequest): - The request object. Request message for - [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. - parent (str): - Required. The location name in which the queue will be - created. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - The list of allowed locations can be obtained by calling - Cloud Tasks' implementation of - [ListLocations][google.cloud.location.Locations.ListLocations]. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - queue (google.cloud.tasks_v2beta3.types.Queue): - Required. The queue to create. - - [Queue's name][google.cloud.tasks.v2beta3.Queue.name] - cannot be the same as an existing queue. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, queue]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.CreateQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.CreateQueueRequest): - request = cloudtasks.CreateQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if queue is not None: - request.queue = queue - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def update_queue(self, - request: cloudtasks.UpdateQueueRequest = None, - *, - queue: gct_queue.Queue = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_queue.Queue: - r"""Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (google.cloud.tasks_v2beta3.types.UpdateQueueRequest): - The request object. Request message for - [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. - queue (google.cloud.tasks_v2beta3.types.Queue): - Required. The queue to create or update. - - The queue's - [name][google.cloud.tasks.v2beta3.Queue.name] must be - specified. - - Output only fields cannot be modified using UpdateQueue. - Any value specified for an output only field will be - ignored. The queue's - [name][google.cloud.tasks.v2beta3.Queue.name] cannot be - changed. - - This corresponds to the ``queue`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask used to specify which fields - of the queue are being updated. - If empty, then all fields will be - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([queue, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.UpdateQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.UpdateQueueRequest): - request = cloudtasks.UpdateQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if queue is not None: - request.queue = queue - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("queue.name", request.queue.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_queue(self, - request: cloudtasks.DeleteQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be - created for 7 days. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Args: - request (google.cloud.tasks_v2beta3.types.DeleteQueueRequest): - The request object. Request message for - [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.DeleteQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, cloudtasks.DeleteQueueRequest): - request = cloudtasks.DeleteQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def purge_queue(self, - request: cloudtasks.PurgeQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Purges a queue by deleting all of its tasks. - All tasks created before this method is called are - permanently deleted. - Purge operations can take up to one minute to take - effect. Tasks might be dispatched before the purge takes - effect. A purge is irreversible. - - Args: - request (google.cloud.tasks_v2beta3.types.PurgeQueueRequest): - The request object. Request message for - [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.tasks_v2beta3.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.PurgeQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.PurgeQueueRequest): - request = cloudtasks.PurgeQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.purge_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def pause_queue(self, - request: cloudtasks.PauseQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Pauses the queue. 
- - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. - Tasks can still be added when the queue is paused. A queue is - paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. - - Args: - request (google.cloud.tasks_v2beta3.types.PauseQueueRequest): - The request object. Request message for - [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.PauseQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, cloudtasks.PauseQueueRequest): - request = cloudtasks.PauseQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.pause_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def resume_queue(self, - request: cloudtasks.ResumeQueueRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> queue.Queue: - r"""Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The - state of a queue is stored in the queue's - [state][google.cloud.tasks.v2beta3.Queue.state]; after calling - this method it will be set to - [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can lead - to target overloading. If you are resuming high-QPS queues, - follow the 500/50/5 pattern described in `Managing Cloud Tasks - Scaling - Risks `__. - - Args: - request (google.cloud.tasks_v2beta3.types.ResumeQueueRequest): - The request object. Request message for - [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. - name (str): - Required. The queue name. 
For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Queue: - A queue is a container of related - tasks. Queues are configured to manage - how those tasks are dispatched. - Configurable properties include rate - limits, retry options, queue types, and - others. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.ResumeQueueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.ResumeQueueRequest): - request = cloudtasks.ResumeQueueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.resume_queue] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: iam_policy_pb2.GetIamPolicyRequest = None, - *, - resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a - [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Args: - request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): - The request object. Request message for `GetIamPolicy` - method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). 
- A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. - - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. 
- request = iam_policy_pb2.GetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: iam_policy_pb2.SetIamPolicyRequest = None, - *, - resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy for a - [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM - permissions yet. Project-level permissions are required to use - the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Args: - request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): - The request object. Request message for `SetIamPolicy` - method. - resource (str): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). - A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. - - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, - *, - resource: str = None, - permissions: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that a caller has on a - [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does - not exist, this will return an empty set of permissions, not a - [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. 
- - Args: - request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): - The request object. Request message for - `TestIamPermissions` method. - resource (str): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (Sequence[str]): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. 
- request = iam_policy_pb2.TestIamPermissionsRequest() - if resource is not None: - request.resource = resource - if permissions: - request.permissions.extend(permissions) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_tasks(self, - request: cloudtasks.ListTasksRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTasksPager: - r"""Lists the tasks in a queue. - - By default, only the - [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is - retrieved due to performance considerations; - [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] - controls the subset of information which is returned. - - The tasks may be returned in any order. The ordering may change - at any time. - - Args: - request (google.cloud.tasks_v2beta3.types.ListTasksRequest): - The request object. Request message for listing tasks - using - [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.services.cloud_tasks.pagers.ListTasksPager: - Response message for listing tasks using - [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.ListTasksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.ListTasksRequest): - request = cloudtasks.ListTasksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_tasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTasksPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_task(self, - request: cloudtasks.GetTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Gets a task. - - Args: - request (google.cloud.tasks_v2beta3.types.GetTaskRequest): - The request object. Request message for getting a task - using - [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.GetTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.GetTaskRequest): - request = cloudtasks.GetTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.get_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_task(self, - request: cloudtasks.CreateTaskRequest = None, - *, - parent: str = None, - task: gct_task.Task = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gct_task.Task: - r"""Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask - command. - - - The maximum task size is 100KB. - - Args: - request (google.cloud.tasks_v2beta3.types.CreateTaskRequest): - The request object. Request message for - [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - The queue must already exist. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - task (google.cloud.tasks_v2beta3.types.Task): - Required. The task to add. - - Task names have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. - The user can optionally specify a task - [name][google.cloud.tasks.v2beta3.Task.name]. If a name - is not specified then the system will generate a random - unique task id, which will be set in the task returned - in the [response][google.cloud.tasks.v2beta3.Task.name]. - - If - [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] - is not set or is in the past then Cloud Tasks will set - it to the current time. 
- - Task De-duplication: - - Explicitly specifying a task ID enables task - de-duplication. If a task's ID is identical to that of - an existing task or a task that was deleted or executed - recently then the call will fail with - [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the - task's queue was created using Cloud Tasks, then another - task with the same name can't be created for ~1hour - after the original task was deleted or executed. If the - task's queue was created using queue.yaml or queue.xml, - then another task with the same name can't be created - for ~9days after the original task was deleted or - executed. - - Because there is an extra lookup cost to identify - duplicate task names, these - [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] - calls have significantly increased latency. Using hashed - strings for the task id or for the prefix of the task id - is recommended. Choosing task ids that are sequential or - have sequential prefixes, for example using a timestamp, - causes an increase in latency and error rates in all - task commands. The infrastructure relies on an - approximately uniform distribution of task ids to store - and serve tasks efficiently. - - This corresponds to the ``task`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, task]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.CreateTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.CreateTaskRequest): - request = cloudtasks.CreateTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if task is not None: - request.task = task - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_task(self, - request: cloudtasks.DeleteTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a task. - A task can be deleted if it is scheduled or dispatched. - A task cannot be deleted if it has executed successfully - or permanently failed. - - Args: - request (google.cloud.tasks_v2beta3.types.DeleteTaskRequest): - The request object. Request message for deleting a task - using - [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. - name (str): - Required. The task name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.DeleteTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.DeleteTaskRequest): - request = cloudtasks.DeleteTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def run_task(self, - request: cloudtasks.RunTaskRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> task.Task: - r"""Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, - even if the task is already running, the queue has reached its - [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. For - example, - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be - used to retry a failed task after a fix has been made or to - manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is - returned contains the [status][Task.status] after the task is - dispatched but before the task is received by its target. - - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] - will be reset to the time that - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was - called plus the retry delay specified in the queue's - [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. - - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - - Args: - request (google.cloud.tasks_v2beta3.types.RunTaskRequest): - The request object. Request message for forcing a task - to run now using - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. - name (str): - Required. The task name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.tasks_v2beta3.types.Task: - A unit of scheduled work. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a cloudtasks.RunTaskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, cloudtasks.RunTaskRequest): - request = cloudtasks.RunTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-tasks", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "CloudTasksClient", -) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py deleted file mode 100644 index cbcd4de3..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py +++ /dev/null @@ -1,264 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.cloud.tasks_v2beta3.types import cloudtasks -from google.cloud.tasks_v2beta3.types import queue -from google.cloud.tasks_v2beta3.types import task - - -class ListQueuesPager: - """A pager for iterating through ``list_queues`` requests. - - This class thinly wraps an initial - :class:`google.cloud.tasks_v2beta3.types.ListQueuesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``queues`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListQueues`` requests and continue to iterate - through the ``queues`` field on the - corresponding responses. - - All the usual :class:`google.cloud.tasks_v2beta3.types.ListQueuesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., cloudtasks.ListQueuesResponse], - request: cloudtasks.ListQueuesRequest, - response: cloudtasks.ListQueuesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.tasks_v2beta3.types.ListQueuesRequest): - The initial request object. - response (google.cloud.tasks_v2beta3.types.ListQueuesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudtasks.ListQueuesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[cloudtasks.ListQueuesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[queue.Queue]: - for page in self.pages: - yield from page.queues - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListQueuesAsyncPager: - """A pager for iterating through ``list_queues`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.tasks_v2beta3.types.ListQueuesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``queues`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListQueues`` requests and continue to iterate - through the ``queues`` field on the - corresponding responses. - - All the usual :class:`google.cloud.tasks_v2beta3.types.ListQueuesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cloudtasks.ListQueuesResponse]], - request: cloudtasks.ListQueuesRequest, - response: cloudtasks.ListQueuesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.tasks_v2beta3.types.ListQueuesRequest): - The initial request object. - response (google.cloud.tasks_v2beta3.types.ListQueuesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = cloudtasks.ListQueuesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[cloudtasks.ListQueuesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[queue.Queue]: - async def async_generator(): - async for page in self.pages: - for response in page.queues: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksPager: - """A pager for iterating through ``list_tasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.tasks_v2beta3.types.ListTasksResponse` object, and - provides an ``__iter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.tasks_v2beta3.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., cloudtasks.ListTasksResponse], - request: cloudtasks.ListTasksRequest, - response: cloudtasks.ListTasksResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.tasks_v2beta3.types.ListTasksRequest): - The initial request object. 
- response (google.cloud.tasks_v2beta3.types.ListTasksResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudtasks.ListTasksRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[cloudtasks.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[task.Task]: - for page in self.pages: - yield from page.tasks - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksAsyncPager: - """A pager for iterating through ``list_tasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.tasks_v2beta3.types.ListTasksResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.tasks_v2beta3.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[cloudtasks.ListTasksResponse]], - request: cloudtasks.ListTasksRequest, - response: cloudtasks.ListTasksResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.tasks_v2beta3.types.ListTasksRequest): - The initial request object. - response (google.cloud.tasks_v2beta3.types.ListTasksResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = cloudtasks.ListTasksRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[cloudtasks.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[task.Task]: - async def async_generator(): - async for page in self.pages: - for response in page.tasks: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py deleted file mode 100644 index 3db96829..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import CloudTasksTransport -from .grpc import CloudTasksGrpcTransport -from .grpc_asyncio import CloudTasksGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] -_transport_registry['grpc'] = CloudTasksGrpcTransport -_transport_registry['grpc_asyncio'] = CloudTasksGrpcAsyncIOTransport - -__all__ = ( - 'CloudTasksTransport', - 'CloudTasksGrpcTransport', - 'CloudTasksGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py deleted file mode 100644 index d184a5ab..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py +++ /dev/null @@ -1,441 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.tasks_v2beta3.types import cloudtasks -from google.cloud.tasks_v2beta3.types import queue -from google.cloud.tasks_v2beta3.types import queue as gct_queue -from google.cloud.tasks_v2beta3.types import task -from google.cloud.tasks_v2beta3.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-tasks', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class CloudTasksTransport(abc.ABC): - """Abstract transport class for CloudTasks.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'cloudtasks.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - 
quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials is service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.list_queues: gapic_v1.method.wrap_method( - self.list_queues, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.get_queue: gapic_v1.method.wrap_method( - self.get_queue, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.create_queue: gapic_v1.method.wrap_method( - self.create_queue, - default_timeout=20.0, - client_info=client_info, - ), - self.update_queue: gapic_v1.method.wrap_method( - self.update_queue, - default_timeout=20.0, - client_info=client_info, - ), - self.delete_queue: gapic_v1.method.wrap_method( - self.delete_queue, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.purge_queue: gapic_v1.method.wrap_method( - self.purge_queue, - default_timeout=20.0, - client_info=client_info, - ), - self.pause_queue: gapic_v1.method.wrap_method( - self.pause_queue, - default_timeout=20.0, - client_info=client_info, - ), - self.resume_queue: gapic_v1.method.wrap_method( - self.resume_queue, - default_timeout=20.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - 
client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=20.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.list_tasks: gapic_v1.method.wrap_method( - self.list_tasks, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.get_task: gapic_v1.method.wrap_method( - self.get_task, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.create_task: gapic_v1.method.wrap_method( - self.create_task, - default_timeout=20.0, - client_info=client_info, - ), - self.delete_task: gapic_v1.method.wrap_method( - self.delete_task, - default_retry=retries.Retry( -initial=0.1,maximum=10.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.run_task: gapic_v1.method.wrap_method( - self.run_task, - default_timeout=20.0, - client_info=client_info, - ), - } - - @property - def list_queues(self) -> Callable[ - [cloudtasks.ListQueuesRequest], - Union[ - cloudtasks.ListQueuesResponse, - Awaitable[cloudtasks.ListQueuesResponse] - ]]: - raise NotImplementedError() - - @property - def 
get_queue(self) -> Callable[ - [cloudtasks.GetQueueRequest], - Union[ - queue.Queue, - Awaitable[queue.Queue] - ]]: - raise NotImplementedError() - - @property - def create_queue(self) -> Callable[ - [cloudtasks.CreateQueueRequest], - Union[ - gct_queue.Queue, - Awaitable[gct_queue.Queue] - ]]: - raise NotImplementedError() - - @property - def update_queue(self) -> Callable[ - [cloudtasks.UpdateQueueRequest], - Union[ - gct_queue.Queue, - Awaitable[gct_queue.Queue] - ]]: - raise NotImplementedError() - - @property - def delete_queue(self) -> Callable[ - [cloudtasks.DeleteQueueRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def purge_queue(self) -> Callable[ - [cloudtasks.PurgeQueueRequest], - Union[ - queue.Queue, - Awaitable[queue.Queue] - ]]: - raise NotImplementedError() - - @property - def pause_queue(self) -> Callable[ - [cloudtasks.PauseQueueRequest], - Union[ - queue.Queue, - Awaitable[queue.Queue] - ]]: - raise NotImplementedError() - - @property - def resume_queue(self) -> Callable[ - [cloudtasks.ResumeQueueRequest], - Union[ - queue.Queue, - Awaitable[queue.Queue] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_tasks(self) -> Callable[ - [cloudtasks.ListTasksRequest], - Union[ - cloudtasks.ListTasksResponse, - Awaitable[cloudtasks.ListTasksResponse] - 
]]: - raise NotImplementedError() - - @property - def get_task(self) -> Callable[ - [cloudtasks.GetTaskRequest], - Union[ - task.Task, - Awaitable[task.Task] - ]]: - raise NotImplementedError() - - @property - def create_task(self) -> Callable[ - [cloudtasks.CreateTaskRequest], - Union[ - gct_task.Task, - Awaitable[gct_task.Task] - ]]: - raise NotImplementedError() - - @property - def delete_task(self) -> Callable[ - [cloudtasks.DeleteTaskRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def run_task(self) -> Callable[ - [cloudtasks.RunTaskRequest], - Union[ - task.Task, - Awaitable[task.Task] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'CloudTasksTransport', -) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py deleted file mode 100644 index c8114f63..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py +++ /dev/null @@ -1,780 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.tasks_v2beta3.types import cloudtasks -from google.cloud.tasks_v2beta3.types import queue -from google.cloud.tasks_v2beta3.types import queue as gct_queue -from google.cloud.tasks_v2beta3.types import task -from google.cloud.tasks_v2beta3.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO - - -class CloudTasksGrpcTransport(CloudTasksTransport): - """gRPC backend transport for CloudTasks. - - Cloud Tasks allows developers to manage the execution of - background work in their applications. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'cloudtasks.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. 
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'cloudtasks.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def list_queues(self) -> Callable[ - [cloudtasks.ListQueuesRequest], - cloudtasks.ListQueuesResponse]: - r"""Return a callable for the list queues method over gRPC. - - Lists queues. - Queues are returned in lexicographical order. - - Returns: - Callable[[~.ListQueuesRequest], - ~.ListQueuesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_queues' not in self._stubs: - self._stubs['list_queues'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/ListQueues', - request_serializer=cloudtasks.ListQueuesRequest.serialize, - response_deserializer=cloudtasks.ListQueuesResponse.deserialize, - ) - return self._stubs['list_queues'] - - @property - def get_queue(self) -> Callable[ - [cloudtasks.GetQueueRequest], - queue.Queue]: - r"""Return a callable for the get queue method over gRPC. - - Gets a queue. - - Returns: - Callable[[~.GetQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_queue' not in self._stubs: - self._stubs['get_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/GetQueue', - request_serializer=cloudtasks.GetQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['get_queue'] - - @property - def create_queue(self) -> Callable[ - [cloudtasks.CreateQueueRequest], - gct_queue.Queue]: - r"""Return a callable for the create queue method over gRPC. - - Creates a queue. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.CreateQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_queue' not in self._stubs: - self._stubs['create_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/CreateQueue', - request_serializer=cloudtasks.CreateQueueRequest.serialize, - response_deserializer=gct_queue.Queue.deserialize, - ) - return self._stubs['create_queue'] - - @property - def update_queue(self) -> Callable[ - [cloudtasks.UpdateQueueRequest], - gct_queue.Queue]: - r"""Return a callable for the update queue method over gRPC. - - Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.UpdateQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_queue' not in self._stubs: - self._stubs['update_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/UpdateQueue', - request_serializer=cloudtasks.UpdateQueueRequest.serialize, - response_deserializer=gct_queue.Queue.deserialize, - ) - return self._stubs['update_queue'] - - @property - def delete_queue(self) -> Callable[ - [cloudtasks.DeleteQueueRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete queue method over gRPC. - - Deletes a queue. 
- - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be - created for 7 days. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.DeleteQueueRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_queue' not in self._stubs: - self._stubs['delete_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/DeleteQueue', - request_serializer=cloudtasks.DeleteQueueRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_queue'] - - @property - def purge_queue(self) -> Callable[ - [cloudtasks.PurgeQueueRequest], - queue.Queue]: - r"""Return a callable for the purge queue method over gRPC. - - Purges a queue by deleting all of its tasks. - All tasks created before this method is called are - permanently deleted. - Purge operations can take up to one minute to take - effect. Tasks might be dispatched before the purge takes - effect. A purge is irreversible. - - Returns: - Callable[[~.PurgeQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'purge_queue' not in self._stubs: - self._stubs['purge_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/PurgeQueue', - request_serializer=cloudtasks.PurgeQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['purge_queue'] - - @property - def pause_queue(self) -> Callable[ - [cloudtasks.PauseQueueRequest], - queue.Queue]: - r"""Return a callable for the pause queue method over gRPC. - - Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. - Tasks can still be added when the queue is paused. A queue is - paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. - - Returns: - Callable[[~.PauseQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'pause_queue' not in self._stubs: - self._stubs['pause_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/PauseQueue', - request_serializer=cloudtasks.PauseQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['pause_queue'] - - @property - def resume_queue(self) -> Callable[ - [cloudtasks.ResumeQueueRequest], - queue.Queue]: - r"""Return a callable for the resume queue method over gRPC. - - Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. 
The - state of a queue is stored in the queue's - [state][google.cloud.tasks.v2beta3.Queue.state]; after calling - this method it will be set to - [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can lead - to target overloading. If you are resuming high-QPS queues, - follow the 500/50/5 pattern described in `Managing Cloud Tasks - Scaling - Risks `__. - - Returns: - Callable[[~.ResumeQueueRequest], - ~.Queue]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'resume_queue' not in self._stubs: - self._stubs['resume_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/ResumeQueue', - request_serializer=cloudtasks.ResumeQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['resume_queue'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a - [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy for a - [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM - permissions yet. Project-level permissions are required to use - the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that a caller has on a - [Queue][google.cloud.tasks.v2beta3.Queue]. 
If the resource does - not exist, this will return an empty set of permissions, not a - [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def list_tasks(self) -> Callable[ - [cloudtasks.ListTasksRequest], - cloudtasks.ListTasksResponse]: - r"""Return a callable for the list tasks method over gRPC. - - Lists the tasks in a queue. - - By default, only the - [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is - retrieved due to performance considerations; - [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] - controls the subset of information which is returned. - - The tasks may be returned in any order. The ordering may change - at any time. - - Returns: - Callable[[~.ListTasksRequest], - ~.ListTasksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/ListTasks', - request_serializer=cloudtasks.ListTasksRequest.serialize, - response_deserializer=cloudtasks.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - @property - def get_task(self) -> Callable[ - [cloudtasks.GetTaskRequest], - task.Task]: - r"""Return a callable for the get task method over gRPC. - - Gets a task. - - Returns: - Callable[[~.GetTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_task' not in self._stubs: - self._stubs['get_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/GetTask', - request_serializer=cloudtasks.GetTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def create_task(self) -> Callable[ - [cloudtasks.CreateTaskRequest], - gct_task.Task]: - r"""Return a callable for the create task method over gRPC. - - Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask - command. - - - The maximum task size is 100KB. - - Returns: - Callable[[~.CreateTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_task' not in self._stubs: - self._stubs['create_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/CreateTask', - request_serializer=cloudtasks.CreateTaskRequest.serialize, - response_deserializer=gct_task.Task.deserialize, - ) - return self._stubs['create_task'] - - @property - def delete_task(self) -> Callable[ - [cloudtasks.DeleteTaskRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete task method over gRPC. - - Deletes a task. - A task can be deleted if it is scheduled or dispatched. - A task cannot be deleted if it has executed successfully - or permanently failed. - - Returns: - Callable[[~.DeleteTaskRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_task' not in self._stubs: - self._stubs['delete_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/DeleteTask', - request_serializer=cloudtasks.DeleteTaskRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_task'] - - @property - def run_task(self) -> Callable[ - [cloudtasks.RunTaskRequest], - task.Task]: - r"""Return a callable for the run task method over gRPC. - - Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, - even if the task is already running, the queue has reached its - [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. For - example, - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be - used to retry a failed task after a fix has been made or to - manually force a task to be dispatched now. - - The dispatched task is returned. 
That is, the task that is - returned contains the [status][Task.status] after the task is - dispatched but before the task is received by its target. - - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] - will be reset to the time that - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was - called plus the retry delay specified in the queue's - [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. - - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - - Returns: - Callable[[~.RunTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'run_task' not in self._stubs: - self._stubs['run_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/RunTask', - request_serializer=cloudtasks.RunTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['run_task'] - - -__all__ = ( - 'CloudTasksGrpcTransport', -) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py deleted file mode 100644 index 1574d1e1..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py +++ /dev/null @@ -1,784 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.tasks_v2beta3.types import cloudtasks -from google.cloud.tasks_v2beta3.types import queue -from google.cloud.tasks_v2beta3.types import queue as gct_queue -from google.cloud.tasks_v2beta3.types import task -from google.cloud.tasks_v2beta3.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO -from .grpc import CloudTasksGrpcTransport - - -class CloudTasksGrpcAsyncIOTransport(CloudTasksTransport): - """gRPC AsyncIO backend transport for CloudTasks. - - Cloud Tasks allows developers to manage the execution of - background work in their applications. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'cloudtasks.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. 
- Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'cloudtasks.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), 
- ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def list_queues(self) -> Callable[ - [cloudtasks.ListQueuesRequest], - Awaitable[cloudtasks.ListQueuesResponse]]: - r"""Return a callable for the list queues method over gRPC. - - Lists queues. - Queues are returned in lexicographical order. - - Returns: - Callable[[~.ListQueuesRequest], - Awaitable[~.ListQueuesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_queues' not in self._stubs: - self._stubs['list_queues'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/ListQueues', - request_serializer=cloudtasks.ListQueuesRequest.serialize, - response_deserializer=cloudtasks.ListQueuesResponse.deserialize, - ) - return self._stubs['list_queues'] - - @property - def get_queue(self) -> Callable[ - [cloudtasks.GetQueueRequest], - Awaitable[queue.Queue]]: - r"""Return a callable for the get queue method over gRPC. - - Gets a queue. - - Returns: - Callable[[~.GetQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_queue' not in self._stubs: - self._stubs['get_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/GetQueue', - request_serializer=cloudtasks.GetQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['get_queue'] - - @property - def create_queue(self) -> Callable[ - [cloudtasks.CreateQueueRequest], - Awaitable[gct_queue.Queue]]: - r"""Return a callable for the create queue method over gRPC. - - Creates a queue. - - Queues created with this method allow tasks to live for a - maximum of 31 days. After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.CreateQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_queue' not in self._stubs: - self._stubs['create_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/CreateQueue', - request_serializer=cloudtasks.CreateQueueRequest.serialize, - response_deserializer=gct_queue.Queue.deserialize, - ) - return self._stubs['create_queue'] - - @property - def update_queue(self) -> Callable[ - [cloudtasks.UpdateQueueRequest], - Awaitable[gct_queue.Queue]]: - r"""Return a callable for the update queue method over gRPC. - - Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a - maximum of 31 days. 
After a task is 31 days old, the task will - be deleted regardless of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.UpdateQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_queue' not in self._stubs: - self._stubs['update_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/UpdateQueue', - request_serializer=cloudtasks.UpdateQueueRequest.serialize, - response_deserializer=gct_queue.Queue.deserialize, - ) - return self._stubs['update_queue'] - - @property - def delete_queue(self) -> Callable[ - [cloudtasks.DeleteQueueRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete queue method over gRPC. - - Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be - created for 7 days. - - WARNING: Using this method may have unintended side effects if - you are using an App Engine ``queue.yaml`` or ``queue.xml`` file - to manage your queues. Read `Overview of Queue Management and - queue.yaml `__ - before using this method. - - Returns: - Callable[[~.DeleteQueueRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_queue' not in self._stubs: - self._stubs['delete_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/DeleteQueue', - request_serializer=cloudtasks.DeleteQueueRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_queue'] - - @property - def purge_queue(self) -> Callable[ - [cloudtasks.PurgeQueueRequest], - Awaitable[queue.Queue]]: - r"""Return a callable for the purge queue method over gRPC. - - Purges a queue by deleting all of its tasks. - All tasks created before this method is called are - permanently deleted. - Purge operations can take up to one minute to take - effect. Tasks might be dispatched before the purge takes - effect. A purge is irreversible. - - Returns: - Callable[[~.PurgeQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'purge_queue' not in self._stubs: - self._stubs['purge_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/PurgeQueue', - request_serializer=cloudtasks.PurgeQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['purge_queue'] - - @property - def pause_queue(self) -> Callable[ - [cloudtasks.PauseQueueRequest], - Awaitable[queue.Queue]]: - r"""Return a callable for the pause queue method over gRPC. - - Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. - Tasks can still be added when the queue is paused. A queue is - paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. 
- - Returns: - Callable[[~.PauseQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'pause_queue' not in self._stubs: - self._stubs['pause_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/PauseQueue', - request_serializer=cloudtasks.PauseQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['pause_queue'] - - @property - def resume_queue(self) -> Callable[ - [cloudtasks.ResumeQueueRequest], - Awaitable[queue.Queue]]: - r"""Return a callable for the resume queue method over gRPC. - - Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The - state of a queue is stored in the queue's - [state][google.cloud.tasks.v2beta3.Queue.state]; after calling - this method it will be set to - [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can lead - to target overloading. If you are resuming high-QPS queues, - follow the 500/50/5 pattern described in `Managing Cloud Tasks - Scaling - Risks `__. - - Returns: - Callable[[~.ResumeQueueRequest], - Awaitable[~.Queue]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'resume_queue' not in self._stubs: - self._stubs['resume_queue'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/ResumeQueue', - request_serializer=cloudtasks.ResumeQueueRequest.serialize, - response_deserializer=queue.Queue.deserialize, - ) - return self._stubs['resume_queue'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a - [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy for a - [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM - permissions yet. Project-level permissions are required to use - the Cloud Console. 
- - Authorization requires the following `Google - IAM `__ permission on the - specified resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that a caller has on a - [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does - not exist, this will return an empty set of permissions, not a - [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def list_tasks(self) -> Callable[ - [cloudtasks.ListTasksRequest], - Awaitable[cloudtasks.ListTasksResponse]]: - r"""Return a callable for the list tasks method over gRPC. - - Lists the tasks in a queue. - - By default, only the - [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is - retrieved due to performance considerations; - [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] - controls the subset of information which is returned. - - The tasks may be returned in any order. The ordering may change - at any time. - - Returns: - Callable[[~.ListTasksRequest], - Awaitable[~.ListTasksResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/ListTasks', - request_serializer=cloudtasks.ListTasksRequest.serialize, - response_deserializer=cloudtasks.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - @property - def get_task(self) -> Callable[ - [cloudtasks.GetTaskRequest], - Awaitable[task.Task]]: - r"""Return a callable for the get task method over gRPC. - - Gets a task. - - Returns: - Callable[[~.GetTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_task' not in self._stubs: - self._stubs['get_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/GetTask', - request_serializer=cloudtasks.GetTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def create_task(self) -> Callable[ - [cloudtasks.CreateTaskRequest], - Awaitable[gct_task.Task]]: - r"""Return a callable for the create task method over gRPC. - - Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask - command. - - - The maximum task size is 100KB. - - Returns: - Callable[[~.CreateTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_task' not in self._stubs: - self._stubs['create_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/CreateTask', - request_serializer=cloudtasks.CreateTaskRequest.serialize, - response_deserializer=gct_task.Task.deserialize, - ) - return self._stubs['create_task'] - - @property - def delete_task(self) -> Callable[ - [cloudtasks.DeleteTaskRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete task method over gRPC. - - Deletes a task. - A task can be deleted if it is scheduled or dispatched. - A task cannot be deleted if it has executed successfully - or permanently failed. - - Returns: - Callable[[~.DeleteTaskRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_task' not in self._stubs: - self._stubs['delete_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/DeleteTask', - request_serializer=cloudtasks.DeleteTaskRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_task'] - - @property - def run_task(self) -> Callable[ - [cloudtasks.RunTaskRequest], - Awaitable[task.Task]]: - r"""Return a callable for the run task method over gRPC. - - Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, - even if the task is already running, the queue has reached its - [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. For - example, - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be - used to retry a failed task after a fix has been made or to - manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is - returned contains the [status][Task.status] after the task is - dispatched but before the task is received by its target. - - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] - will be reset to the time that - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was - called plus the retry delay specified in the queue's - [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. - - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. 
- - Returns: - Callable[[~.RunTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_task' not in self._stubs: - self._stubs['run_task'] = self.grpc_channel.unary_unary( - '/google.cloud.tasks.v2beta3.CloudTasks/RunTask', - request_serializer=cloudtasks.RunTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['run_task'] - - -__all__ = ( - 'CloudTasksGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/__init__.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/__init__.py deleted file mode 100644 index 3b97518d..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/__init__.py +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .cloudtasks import ( - CreateQueueRequest, - CreateTaskRequest, - DeleteQueueRequest, - DeleteTaskRequest, - GetQueueRequest, - GetTaskRequest, - ListQueuesRequest, - ListQueuesResponse, - ListTasksRequest, - ListTasksResponse, - PauseQueueRequest, - PurgeQueueRequest, - ResumeQueueRequest, - RunTaskRequest, - UpdateQueueRequest, -) -from .queue import ( - Queue, - QueueStats, - RateLimits, - RetryConfig, - StackdriverLoggingConfig, -) -from .target import ( - AppEngineHttpQueue, - AppEngineHttpRequest, - AppEngineRouting, - HttpRequest, - OAuthToken, - OidcToken, - PullMessage, - HttpMethod, -) -from .task import ( - Attempt, - Task, -) - -__all__ = ( - 'CreateQueueRequest', - 'CreateTaskRequest', - 'DeleteQueueRequest', - 'DeleteTaskRequest', - 'GetQueueRequest', - 'GetTaskRequest', - 'ListQueuesRequest', - 'ListQueuesResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'PauseQueueRequest', - 'PurgeQueueRequest', - 'ResumeQueueRequest', - 'RunTaskRequest', - 'UpdateQueueRequest', - 'Queue', - 'QueueStats', - 'RateLimits', - 'RetryConfig', - 'StackdriverLoggingConfig', - 'AppEngineHttpQueue', - 'AppEngineHttpRequest', - 'AppEngineRouting', - 'HttpRequest', - 'OAuthToken', - 'OidcToken', - 'PullMessage', - 'HttpMethod', - 'Attempt', - 'Task', -) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/cloudtasks.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/cloudtasks.py deleted file mode 100644 index 5d3baf41..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/cloudtasks.py +++ /dev/null @@ -1,579 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.tasks_v2beta3.types import queue as gct_queue -from google.cloud.tasks_v2beta3.types import task as gct_task -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.tasks.v2beta3', - manifest={ - 'ListQueuesRequest', - 'ListQueuesResponse', - 'GetQueueRequest', - 'CreateQueueRequest', - 'UpdateQueueRequest', - 'DeleteQueueRequest', - 'PurgeQueueRequest', - 'PauseQueueRequest', - 'ResumeQueueRequest', - 'ListTasksRequest', - 'ListTasksResponse', - 'GetTaskRequest', - 'CreateTaskRequest', - 'DeleteTaskRequest', - 'RunTaskRequest', - }, -) - - -class ListQueuesRequest(proto.Message): - r"""Request message for - [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. - - Attributes: - parent (str): - Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - filter (str): - ``filter`` can be used to specify a subset of queues. Any - [Queue][google.cloud.tasks.v2beta3.Queue] field can be used - as a filter and several operators as supported. For example: - ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as - described in `Stackdriver's Advanced Logs - Filters `__. - - Sample filter "state: PAUSED". - - Note that using filters might cause fewer queues than the - requested page_size to be returned. - page_size (int): - Requested page size. - - The maximum page size is 9800. If unspecified, the page size - will be the maximum. 
Fewer queues than requested might be - returned, even if more queues exist; use the - [next_page_token][google.cloud.tasks.v2beta3.ListQueuesResponse.next_page_token] - in the response to determine if more queues exist. - page_token (str): - A token identifying the page of results to return. - - To request the first page results, page_token must be empty. - To request the next page of results, page_token must be the - value of - [next_page_token][google.cloud.tasks.v2beta3.ListQueuesResponse.next_page_token] - returned from the previous call to - [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues] - method. It is an error to switch the value of the - [filter][google.cloud.tasks.v2beta3.ListQueuesRequest.filter] - while iterating through pages. - read_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Read mask is used for a more granular control over - what the API returns. If the mask is not present all fields - will be returned except [Queue.stats]. [Queue.stats] will be - returned only if it was explicitly specified in the mask. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask_pb2.FieldMask, - ) - - -class ListQueuesResponse(proto.Message): - r"""Response message for - [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. - - Attributes: - queues (Sequence[google.cloud.tasks_v2beta3.types.Queue]): - The list of queues. - next_page_token (str): - A token to retrieve next page of results. - - To return the next page of results, call - [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues] - with this value as the - [page_token][google.cloud.tasks.v2beta3.ListQueuesRequest.page_token]. - - If the next_page_token is empty, there are no more results. 
- - The page token is valid for only 2 hours. - """ - - @property - def raw_page(self): - return self - - queues = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gct_queue.Queue, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetQueueRequest(proto.Message): - r"""Request message for - [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. - - Attributes: - name (str): - Required. The resource name of the queue. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - read_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Read mask is used for a more granular control over - what the API returns. If the mask is not present all fields - will be returned except [Queue.stats]. [Queue.stats] will be - returned only if it was explicitly specified in the mask. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - read_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class CreateQueueRequest(proto.Message): - r"""Request message for - [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. - - Attributes: - parent (str): - Required. The location name in which the queue will be - created. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - - The list of allowed locations can be obtained by calling - Cloud Tasks' implementation of - [ListLocations][google.cloud.location.Locations.ListLocations]. - queue (google.cloud.tasks_v2beta3.types.Queue): - Required. The queue to create. - - [Queue's name][google.cloud.tasks.v2beta3.Queue.name] cannot - be the same as an existing queue. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - queue = proto.Field( - proto.MESSAGE, - number=2, - message=gct_queue.Queue, - ) - - -class UpdateQueueRequest(proto.Message): - r"""Request message for - [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. 
- - Attributes: - queue (google.cloud.tasks_v2beta3.types.Queue): - Required. The queue to create or update. - - The queue's [name][google.cloud.tasks.v2beta3.Queue.name] - must be specified. - - Output only fields cannot be modified using UpdateQueue. Any - value specified for an output only field will be ignored. - The queue's [name][google.cloud.tasks.v2beta3.Queue.name] - cannot be changed. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask used to specify which fields of the - queue are being updated. - If empty, then all fields will be updated. - """ - - queue = proto.Field( - proto.MESSAGE, - number=1, - message=gct_queue.Queue, - ) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteQueueRequest(proto.Message): - r"""Request message for - [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. - - Attributes: - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class PurgeQueueRequest(proto.Message): - r"""Request message for - [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. - - Attributes: - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class PauseQueueRequest(proto.Message): - r"""Request message for - [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. - - Attributes: - name (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ResumeQueueRequest(proto.Message): - r"""Request message for - [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. - - Attributes: - name (str): - Required. The queue name. 
For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListTasksRequest(proto.Message): - r"""Request message for listing tasks using - [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. - - Attributes: - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - response_view (google.cloud.tasks_v2beta3.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta3.Task] will be returned. - - By default response_view is - [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2beta3.Task] resource. - page_size (int): - Maximum page size. - - Fewer tasks than requested might be returned, even if more - tasks exist; use - [next_page_token][google.cloud.tasks.v2beta3.ListTasksResponse.next_page_token] - in the response to determine if more tasks exist. - - The maximum page size is 1000. If unspecified, the page size - will be the maximum. - page_token (str): - A token identifying the page of results to return. - - To request the first page results, page_token must be empty. - To request the next page of results, page_token must be the - value of - [next_page_token][google.cloud.tasks.v2beta3.ListTasksResponse.next_page_token] - returned from the previous call to - [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] - method. - - The page token is valid for only 2 hours. 
- """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - response_view = proto.Field( - proto.ENUM, - number=2, - enum=gct_task.Task.View, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListTasksResponse(proto.Message): - r"""Response message for listing tasks using - [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. - - Attributes: - tasks (Sequence[google.cloud.tasks_v2beta3.types.Task]): - The list of tasks. - next_page_token (str): - A token to retrieve next page of results. - - To return the next page of results, call - [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] - with this value as the - [page_token][google.cloud.tasks.v2beta3.ListTasksRequest.page_token]. - - If the next_page_token is empty, there are no more results. - """ - - @property - def raw_page(self): - return self - - tasks = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gct_task.Task, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetTaskRequest(proto.Message): - r"""Request message for getting a task using - [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. - - Attributes: - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (google.cloud.tasks_v2beta3.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta3.Task] will be returned. - - By default response_view is - [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. 
- - Authorization for - [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2beta3.Task] resource. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - response_view = proto.Field( - proto.ENUM, - number=2, - enum=gct_task.Task.View, - ) - - -class CreateTaskRequest(proto.Message): - r"""Request message for - [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - - Attributes: - parent (str): - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - The queue must already exist. - task (google.cloud.tasks_v2beta3.types.Task): - Required. The task to add. - - Task names have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. - The user can optionally specify a task - [name][google.cloud.tasks.v2beta3.Task.name]. If a name is - not specified then the system will generate a random unique - task id, which will be set in the task returned in the - [response][google.cloud.tasks.v2beta3.Task.name]. - - If - [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] - is not set or is in the past then Cloud Tasks will set it to - the current time. - - Task De-duplication: - - Explicitly specifying a task ID enables task de-duplication. - If a task's ID is identical to that of an existing task or a - task that was deleted or executed recently then the call - will fail with - [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the - task's queue was created using Cloud Tasks, then another - task with the same name can't be created for ~1hour after - the original task was deleted or executed. If the task's - queue was created using queue.yaml or queue.xml, then - another task with the same name can't be created for ~9days - after the original task was deleted or executed. 
- - Because there is an extra lookup cost to identify duplicate - task names, these - [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] - calls have significantly increased latency. Using hashed - strings for the task id or for the prefix of the task id is - recommended. Choosing task ids that are sequential or have - sequential prefixes, for example using a timestamp, causes - an increase in latency and error rates in all task commands. - The infrastructure relies on an approximately uniform - distribution of task ids to store and serve tasks - efficiently. - response_view (google.cloud.tasks_v2beta3.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta3.Task] will be returned. - - By default response_view is - [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2beta3.Task] resource. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - task = proto.Field( - proto.MESSAGE, - number=2, - message=gct_task.Task, - ) - response_view = proto.Field( - proto.ENUM, - number=3, - enum=gct_task.Task.View, - ) - - -class DeleteTaskRequest(proto.Message): - r"""Request message for deleting a task using - [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. - - Attributes: - name (str): - Required. The task name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class RunTaskRequest(proto.Message): - r"""Request message for forcing a task to run now using - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. - - Attributes: - name (str): - Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (google.cloud.tasks_v2beta3.types.Task.View): - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta3.Task] will be returned. - - By default response_view is - [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all - information is retrieved by default because some data, such - as payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. - - Authorization for - [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google - IAM `__ permission on the - [Task][google.cloud.tasks.v2beta3.Task] resource. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - response_view = proto.Field( - proto.ENUM, - number=2, - enum=gct_task.Task.View, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/queue.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/queue.py deleted file mode 100644 index 228b887a..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/queue.py +++ /dev/null @@ -1,556 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.tasks_v2beta3.types import target -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.tasks.v2beta3', - manifest={ - 'Queue', - 'RateLimits', - 'RetryConfig', - 'StackdriverLoggingConfig', - 'QueueStats', - }, -) - - -class Queue(proto.Message): - r"""A queue is a container of related tasks. Queues are - configured to manage how those tasks are dispatched. - Configurable properties include rate limits, retry options, - queue types, and others. - - Attributes: - name (str): - Caller-specified and required in - [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue], - after which it becomes output only. - - The queue name. - - The queue name must have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), colons (:), or periods (.). For - more information, see `Identifying - projects `__ - - ``LOCATION_ID`` is the canonical ID for the queue's - location. The list of available locations can be obtained - by calling - [ListLocations][google.cloud.location.Locations.ListLocations]. - For more information, see - https://cloud.google.com/about/locations/. - - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), or hyphens (-). The maximum length is 100 - characters. 
- app_engine_http_queue (google.cloud.tasks_v2beta3.types.AppEngineHttpQueue): - [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] - settings apply only to [App Engine - tasks][google.cloud.tasks.v2beta3.AppEngineHttpRequest] in - this queue. [Http - tasks][google.cloud.tasks.v2beta3.HttpRequest] are not - affected by this proto. - rate_limits (google.cloud.tasks_v2beta3.types.RateLimits): - Rate limits for task dispatches. - - [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits] - and - [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] - are related because they both control task attempts. However - they control task attempts in different ways: - - - [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits] - controls the total rate of dispatches from a queue (i.e. - all traffic dispatched from the queue, regardless of - whether the dispatch is from a first attempt or a retry). - - [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] - controls what happens to particular a task after its - first attempt fails. That is, - [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] - controls task retries (the second attempt, third attempt, - etc). - - The queue's actual dispatch rate is the result of: - - - Number of tasks in the queue - - User-specified throttling: - [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits], - [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config], - and the [queue's - state][google.cloud.tasks.v2beta3.Queue.state]. - - System throttling due to ``429`` (Too Many Requests) or - ``503`` (Service Unavailable) responses from the worker, - high error rates, or to smooth sudden large traffic - spikes. - retry_config (google.cloud.tasks_v2beta3.types.RetryConfig): - Settings that determine the retry behavior. - - - For tasks created using Cloud Tasks: the queue-level - retry settings apply to all tasks in the queue that were - created using Cloud Tasks. 
Retry settings cannot be set - on individual tasks. - - For tasks created using the App Engine SDK: the - queue-level retry settings apply to all tasks in the - queue which do not have retry settings explicitly set on - the task and were created by the App Engine SDK. See `App - Engine - documentation `__. - state (google.cloud.tasks_v2beta3.types.Queue.State): - Output only. The state of the queue. - - ``state`` can only be changed by calling - [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue], - [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue], - or uploading - `queue.yaml/xml `__. - [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] - cannot be used to change ``state``. - purge_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last time this queue was purged. - - All tasks that were - [created][google.cloud.tasks.v2beta3.Task.create_time] - before this time were purged. - - A queue can be purged using - [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue], - the `App Engine Task Queue SDK, or the Cloud - Console `__. - - Purge time will be truncated to the nearest microsecond. - Purge time will be unset if the queue has never been purged. - task_ttl (google.protobuf.duration_pb2.Duration): - The maximum amount of time that a task will be retained in - this queue. - - Queues created by Cloud Tasks have a default ``task_ttl`` of - 31 days. After a task has lived for ``task_ttl``, the task - will be deleted regardless of whether it was dispatched or - not. - - The ``task_ttl`` for queues created via queue.yaml/xml is - equal to the maximum duration because there is a `storage - quota `__ - for these queues. To view the maximum valid duration, see - the documentation for [Duration][google.protobuf.Duration]. - tombstone_ttl (google.protobuf.duration_pb2.Duration): - The task tombstone time to live (TTL). 
- - After a task is deleted or executed, the task's tombstone is - retained for the length of time specified by - ``tombstone_ttl``. The tombstone is used by task - de-duplication; another task with the same name can't be - created until the tombstone has expired. For more - information about task de-duplication, see the documentation - for - [CreateTaskRequest][google.cloud.tasks.v2beta3.CreateTaskRequest.task]. - - Queues created by Cloud Tasks have a default - ``tombstone_ttl`` of 1 hour. - stackdriver_logging_config (google.cloud.tasks_v2beta3.types.StackdriverLoggingConfig): - Configuration options for writing logs to `Stackdriver - Logging `__. If this - field is unset, then no logs are written. - type_ (google.cloud.tasks_v2beta3.types.Queue.Type): - Immutable. The type of a queue (push or pull). - - ``Queue.type`` is an immutable property of the queue that is - set at the queue creation time. When left unspecified, the - default value of ``PUSH`` is selected. - stats (google.cloud.tasks_v2beta3.types.QueueStats): - Output only. The realtime, informational - statistics for a queue. In order to receive the - statistics the caller should include this field - in the FieldMask. 
- """ - class State(proto.Enum): - r"""State of the queue.""" - STATE_UNSPECIFIED = 0 - RUNNING = 1 - PAUSED = 2 - DISABLED = 3 - - class Type(proto.Enum): - r"""The type of the queue.""" - TYPE_UNSPECIFIED = 0 - PULL = 1 - PUSH = 2 - - name = proto.Field( - proto.STRING, - number=1, - ) - app_engine_http_queue = proto.Field( - proto.MESSAGE, - number=3, - oneof='queue_type', - message=target.AppEngineHttpQueue, - ) - rate_limits = proto.Field( - proto.MESSAGE, - number=4, - message='RateLimits', - ) - retry_config = proto.Field( - proto.MESSAGE, - number=5, - message='RetryConfig', - ) - state = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - purge_time = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - task_ttl = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - tombstone_ttl = proto.Field( - proto.MESSAGE, - number=9, - message=duration_pb2.Duration, - ) - stackdriver_logging_config = proto.Field( - proto.MESSAGE, - number=10, - message='StackdriverLoggingConfig', - ) - type_ = proto.Field( - proto.ENUM, - number=11, - enum=Type, - ) - stats = proto.Field( - proto.MESSAGE, - number=12, - message='QueueStats', - ) - - -class RateLimits(proto.Message): - r"""Rate limits. - - This message determines the maximum rate that tasks can be - dispatched by a queue, regardless of whether the dispatch is a first - task attempt or a retry. - - Note: The debugging command, - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask], will run a - task even if the queue has reached its - [RateLimits][google.cloud.tasks.v2beta3.RateLimits]. - - Attributes: - max_dispatches_per_second (float): - The maximum rate at which tasks are dispatched from this - queue. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - - For [App Engine - queues][google.cloud.tasks.v2beta3.AppEngineHttpQueue], - the maximum allowed value is 500. 
- - This field has the same meaning as `rate in - queue.yaml/xml `__. - max_burst_size (int): - The max burst size. - - Max burst size limits how fast tasks in queue are processed - when many tasks are in the queue and the rate is high. This - field allows the queue to have a high rate so processing - starts shortly after a task is enqueued, but still limits - resource usage when many tasks are enqueued in a short - period of time. - - The `token - bucket `__ - algorithm is used to control the rate of task dispatches. - Each queue has a token bucket that holds tokens, up to the - maximum specified by ``max_burst_size``. Each time a task is - dispatched, a token is removed from the bucket. Tasks will - be dispatched until the queue's bucket runs out of tokens. - The bucket will be continuously refilled with new tokens - based on - [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. - - The default value of ``max_burst_size`` is picked by Cloud - Tasks based on the value of - [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. - - The maximum value of ``max_burst_size`` is 500. - - For App Engine queues that were created or updated using - ``queue.yaml/xml``, ``max_burst_size`` is equal to - `bucket_size `__. - If - [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] - is called on a queue without explicitly setting a value for - ``max_burst_size``, ``max_burst_size`` value will get - updated if - [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] - is updating - [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. - max_concurrent_dispatches (int): - The maximum number of concurrent tasks that Cloud Tasks - allows to be dispatched for this queue. After this threshold - has been reached, Cloud Tasks stops dispatching tasks until - the number of concurrent requests decreases. 
- - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - The maximum allowed value is 5,000. - - This field has the same meaning as `max_concurrent_requests - in - queue.yaml/xml `__. - """ - - max_dispatches_per_second = proto.Field( - proto.DOUBLE, - number=1, - ) - max_burst_size = proto.Field( - proto.INT32, - number=2, - ) - max_concurrent_dispatches = proto.Field( - proto.INT32, - number=3, - ) - - -class RetryConfig(proto.Message): - r"""Retry config. - These settings determine when a failed task attempt is retried. - - Attributes: - max_attempts (int): - Number of attempts per task. - - Cloud Tasks will attempt the task ``max_attempts`` times - (that is, if the first attempt fails, then there will be - ``max_attempts - 1`` retries). Must be >= -1. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - -1 indicates unlimited attempts. - - This field has the same meaning as `task_retry_limit in - queue.yaml/xml `__. - max_retry_duration (google.protobuf.duration_pb2.Duration): - If positive, ``max_retry_duration`` specifies the time limit - for retrying a failed task, measured from when the task was - first attempted. Once ``max_retry_duration`` time has passed - *and* the task has been attempted - [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] - times, no further attempts will be made and the task will be - deleted. - - If zero, then the task age is unlimited. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - ``max_retry_duration`` will be truncated to the nearest - second. - - This field has the same meaning as `task_age_limit in - queue.yaml/xml `__. 
- min_backoff (google.protobuf.duration_pb2.Duration): - A task will be - [scheduled][google.cloud.tasks.v2beta3.Task.schedule_time] - for retry between - [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] - and - [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] - duration after it fails, if the queue's - [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig] - specifies that the task should be retried. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - ``min_backoff`` will be truncated to the nearest second. - - This field has the same meaning as `min_backoff_seconds in - queue.yaml/xml `__. - max_backoff (google.protobuf.duration_pb2.Duration): - A task will be - [scheduled][google.cloud.tasks.v2beta3.Task.schedule_time] - for retry between - [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] - and - [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] - duration after it fails, if the queue's - [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig] - specifies that the task should be retried. - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - ``max_backoff`` will be truncated to the nearest second. - - This field has the same meaning as `max_backoff_seconds in - queue.yaml/xml `__. - max_doublings (int): - The time between retries will double ``max_doublings`` - times. - - A task's retry interval starts at - [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff], - then doubles ``max_doublings`` times, then increases - linearly, and finally retries at intervals of - [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] - up to - [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] - times. 
- - For example, if - [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] - is 10s, - [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] - is 300s, and ``max_doublings`` is 3, then the a task will - first be retried in 10s. The retry interval will double - three times, and then increase linearly by 2^3 \* 10s. - Finally, the task will retry at intervals of - [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] - until the task has been attempted - [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] - times. Thus, the requests will retry at 10s, 20s, 40s, 80s, - 160s, 240s, 300s, 300s, .... - - If unspecified when the queue is created, Cloud Tasks will - pick the default. - - This field has the same meaning as `max_doublings in - queue.yaml/xml `__. - """ - - max_attempts = proto.Field( - proto.INT32, - number=1, - ) - max_retry_duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - min_backoff = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - max_backoff = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - max_doublings = proto.Field( - proto.INT32, - number=5, - ) - - -class StackdriverLoggingConfig(proto.Message): - r"""Configuration options for writing logs to `Stackdriver - Logging `__. - - Attributes: - sampling_ratio (float): - Specifies the fraction of operations to write to - `Stackdriver - Logging `__. This - field may contain any value between 0.0 and 1.0, inclusive. - 0.0 is the default and means that no operations are logged. - """ - - sampling_ratio = proto.Field( - proto.DOUBLE, - number=1, - ) - - -class QueueStats(proto.Message): - r"""Statistics for a queue. - Attributes: - tasks_count (int): - Output only. 
An estimation of the number of - tasks in the queue, that is, the tasks in the - queue that haven't been executed, the tasks in - the queue which the queue has dispatched but has - not yet received a reply for, and the failed - tasks that the queue is retrying. - oldest_estimated_arrival_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. An estimation of the nearest - time in the future where a task in the queue is - scheduled to be executed. - executed_last_minute_count (int): - Output only. The number of tasks that the - queue has dispatched and received a reply for - during the last minute. This variable counts - both successful and non-successful executions. - concurrent_dispatches_count (int): - Output only. The number of requests that the - queue has dispatched but has not received a - reply for yet. - effective_execution_rate (float): - Output only. The current maximum number of - tasks per second executed by the queue. The - maximum value of this variable is controlled by - the RateLimits of the Queue. However, this value - could be less to avoid overloading the endpoints - tasks in the queue are targeting. 
- """ - - tasks_count = proto.Field( - proto.INT64, - number=1, - ) - oldest_estimated_arrival_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - executed_last_minute_count = proto.Field( - proto.INT64, - number=3, - ) - concurrent_dispatches_count = proto.Field( - proto.INT64, - number=4, - ) - effective_execution_rate = proto.Field( - proto.DOUBLE, - number=5, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/target.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/target.py deleted file mode 100644 index 09b67def..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/target.py +++ /dev/null @@ -1,620 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.tasks.v2beta3', - manifest={ - 'HttpMethod', - 'PullMessage', - 'HttpRequest', - 'AppEngineHttpQueue', - 'AppEngineHttpRequest', - 'AppEngineRouting', - 'OAuthToken', - 'OidcToken', - }, -) - - -class HttpMethod(proto.Enum): - r"""The HTTP method used to execute the task.""" - HTTP_METHOD_UNSPECIFIED = 0 - POST = 1 - GET = 2 - HEAD = 3 - PUT = 4 - DELETE = 5 - PATCH = 6 - OPTIONS = 7 - - -class PullMessage(proto.Message): - r"""Pull Message. 
- - This proto can only be used for tasks in a queue which has - [PULL][google.cloud.tasks.v2beta3.Queue.type] type. It currently - exists for backwards compatibility with the App Engine Task Queue - SDK. This message type maybe returned with methods - [list][google.cloud.tasks.v2beta3.CloudTask.ListTasks] and - [get][google.cloud.tasks.v2beta3.CloudTask.ListTasks], when the - response view is [FULL][google.cloud.tasks.v2beta3.Task.View.Full]. - - Attributes: - payload (bytes): - A data payload consumed by the worker to - execute the task. - tag (str): - The tasks's tag. - - The tag is less than 500 characters. - - SDK compatibility: Although the SDK allows tags to be either - string or - `bytes `__, - only UTF-8 encoded tags can be used in Cloud Tasks. If a tag - isn't UTF-8 encoded, the tag will be empty when the task is - returned by Cloud Tasks. - """ - - payload = proto.Field( - proto.BYTES, - number=1, - ) - tag = proto.Field( - proto.STRING, - number=2, - ) - - -class HttpRequest(proto.Message): - r"""HTTP request. - - The task will be pushed to the worker as an HTTP request. If the - worker or the redirected worker acknowledges the task by returning a - successful HTTP response code ([``200`` - ``299``]), the task will - be removed from the queue. If any other HTTP response code is - returned or no response is received, the task will be retried - according to the following: - - - User-specified throttling: [retry - configuration][google.cloud.tasks.v2beta3.Queue.retry_config], - [rate limits][google.cloud.tasks.v2beta3.Queue.rate_limits], and - the [queue's state][google.cloud.tasks.v2beta3.Queue.state]. - - - System throttling: To prevent the worker from overloading, Cloud - Tasks may temporarily reduce the queue's effective rate. - User-specified settings will not be changed. - - System throttling happens because: - - - Cloud Tasks backs off on all errors. 
Normally the backoff - specified in [rate - limits][google.cloud.tasks.v2beta3.Queue.rate_limits] will be - used. But if the worker returns ``429`` (Too Many Requests), - ``503`` (Service Unavailable), or the rate of errors is high, - Cloud Tasks will use a higher backoff rate. The retry specified - in the ``Retry-After`` HTTP response header is considered. - - - To prevent traffic spikes and to smooth sudden increases in - traffic, dispatches ramp up slowly when the queue is newly - created or idle and if large numbers of tasks suddenly become - available to dispatch (due to spikes in create task rates, the - queue being unpaused, or many tasks that are scheduled at the - same time). - - Attributes: - url (str): - Required. The full url path that the request will be sent - to. - - This string must begin with either "http://" or "https://". - Some examples are: ``http://acme.com`` and - ``https://acme.com/sales:8080``. Cloud Tasks will encode - some characters for safety and compatibility. The maximum - allowed URL length is 2083 characters after encoding. - - The ``Location`` header response from a redirect response - [``300`` - ``399``] may be followed. The redirect is not - counted as a separate attempt. - http_method (google.cloud.tasks_v2beta3.types.HttpMethod): - The HTTP method to use for the request. The - default is POST. - headers (Sequence[google.cloud.tasks_v2beta3.types.HttpRequest.HeadersEntry]): - HTTP request headers. - - This map contains the header field names and values. Headers - can be set when the [task is - created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - - These headers represent a subset of the headers that will - accompany the task's HTTP request. Some HTTP request headers - will be ignored or replaced. - - A partial list of headers that will be ignored or replaced - is: - - - Host: This will be computed by Cloud Tasks and derived - from - [HttpRequest.url][google.cloud.tasks.v2beta3.HttpRequest.url]. 
- - Content-Length: This will be computed by Cloud Tasks. - - User-Agent: This will be set to ``"Google-Cloud-Tasks"``. - - X-Google-*: Google use only. - - X-AppEngine-*: Google use only. - - ``Content-Type`` won't be set by Cloud Tasks. You can - explicitly set ``Content-Type`` to a media type when the - [task is - created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - For example, ``Content-Type`` can be set to - ``"application/octet-stream"`` or ``"application/json"``. - - Headers which can have multiple values (according to - RFC2616) can be specified using comma-separated values. - - The size of the headers must be less than 80KB. - body (bytes): - HTTP request body. - - A request body is allowed only if the [HTTP - method][google.cloud.tasks.v2beta3.HttpRequest.http_method] - is POST, PUT, or PATCH. It is an error to set body on a task - with an incompatible - [HttpMethod][google.cloud.tasks.v2beta3.HttpMethod]. - oauth_token (google.cloud.tasks_v2beta3.types.OAuthToken): - If specified, an `OAuth - token `__ - will be generated and attached as an ``Authorization`` - header in the HTTP request. - - This type of authorization should generally only be used - when calling Google APIs hosted on \*.googleapis.com. - oidc_token (google.cloud.tasks_v2beta3.types.OidcToken): - If specified, an - `OIDC `__ - token will be generated and attached as an ``Authorization`` - header in the HTTP request. - - This type of authorization can be used for many scenarios, - including calling Cloud Run, or endpoints where you intend - to validate the token yourself. 
- """ - - url = proto.Field( - proto.STRING, - number=1, - ) - http_method = proto.Field( - proto.ENUM, - number=2, - enum='HttpMethod', - ) - headers = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - body = proto.Field( - proto.BYTES, - number=4, - ) - oauth_token = proto.Field( - proto.MESSAGE, - number=5, - oneof='authorization_header', - message='OAuthToken', - ) - oidc_token = proto.Field( - proto.MESSAGE, - number=6, - oneof='authorization_header', - message='OidcToken', - ) - - -class AppEngineHttpQueue(proto.Message): - r"""App Engine HTTP queue. - - The task will be delivered to the App Engine application hostname - specified by its - [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] - and - [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest]. - The documentation for - [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] - explains how the task's host URL is constructed. - - Using - [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] - requires - ```appengine.applications.get`` `__ - Google IAM permission for the project and the following scope: - - ``https://www.googleapis.com/auth/cloud-platform`` - - Attributes: - app_engine_routing_override (google.cloud.tasks_v2beta3.types.AppEngineRouting): - Overrides for the [task-level - app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. - - If set, ``app_engine_routing_override`` is used for all - tasks in the queue, no matter what the setting is for the - [task-level - app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. - """ - - app_engine_routing_override = proto.Field( - proto.MESSAGE, - number=1, - message='AppEngineRouting', - ) - - -class AppEngineHttpRequest(proto.Message): - r"""App Engine HTTP request. - - The message defines the HTTP request that is sent to an App Engine - app when the task is dispatched. 
- - Using - [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] - requires - ```appengine.applications.get`` `__ - Google IAM permission for the project and the following scope: - - ``https://www.googleapis.com/auth/cloud-platform`` - - The task will be delivered to the App Engine app which belongs to - the same project as the queue. For more information, see `How - Requests are - Routed `__ - and how routing is affected by `dispatch - files `__. - Traffic is encrypted during transport and never leaves Google - datacenters. Because this traffic is carried over a communication - mechanism internal to Google, you cannot explicitly set the protocol - (for example, HTTP or HTTPS). The request to the handler, however, - will appear to have used the HTTP protocol. - - The [AppEngineRouting][google.cloud.tasks.v2beta3.AppEngineRouting] - used to construct the URL that the task is delivered to can be set - at the queue-level or task-level: - - - If set, - [app_engine_routing_override][google.cloud.tasks.v2beta3.AppEngineHttpQueue.app_engine_routing_override] - is used for all tasks in the queue, no matter what the setting is - for the [task-level - app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. - - The ``url`` that the task will be sent to is: - - - ``url =`` - [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] ``+`` - [relative_uri][google.cloud.tasks.v2beta3.AppEngineHttpRequest.relative_uri] - - Tasks can be dispatched to secure app handlers, unsecure app - handlers, and URIs restricted with - ```login: admin`` `__. - Because tasks are not run as any user, they cannot be dispatched to - URIs restricted with - ```login: required`` `__ - Task dispatches also do not follow redirects. - - The task attempt has succeeded if the app's request handler returns - an HTTP response code in the range [``200`` - ``299``]. 
The task - attempt has failed if the app's handler returns a non-2xx response - code or Cloud Tasks does not receive response before the - [deadline][google.cloud.tasks.v2beta3.Task.dispatch_deadline]. - Failed tasks will be retried according to the [retry - configuration][google.cloud.tasks.v2beta3.Queue.retry_config]. - ``503`` (Service Unavailable) is considered an App Engine system - error instead of an application error and will cause Cloud Tasks' - traffic congestion control to temporarily throttle the queue's - dispatches. Unlike other types of task targets, a ``429`` (Too Many - Requests) response from an app handler does not cause traffic - congestion control to throttle the queue. - - Attributes: - http_method (google.cloud.tasks_v2beta3.types.HttpMethod): - The HTTP method to use for the request. The default is POST. - - The app's request handler for the task's target URL must be - able to handle HTTP requests with this http_method, - otherwise the task attempt fails with error code 405 (Method - Not Allowed). See `Writing a push task request - handler `__ - and the App Engine documentation for your runtime on `How - Requests are - Handled `__. - app_engine_routing (google.cloud.tasks_v2beta3.types.AppEngineRouting): - Task-level setting for App Engine routing. - - If set, - [app_engine_routing_override][google.cloud.tasks.v2beta3.AppEngineHttpQueue.app_engine_routing_override] - is used for all tasks in the queue, no matter what the - setting is for the [task-level - app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. - relative_uri (str): - The relative URI. - The relative URI must begin with "/" and must be - a valid HTTP relative URI. It can contain a path - and query string arguments. If the relative URI - is empty, then the root path "/" will be used. - No spaces are allowed, and the maximum length - allowed is 2083 characters. 
- headers (Sequence[google.cloud.tasks_v2beta3.types.AppEngineHttpRequest.HeadersEntry]): - HTTP request headers. - - This map contains the header field names and values. Headers - can be set when the [task is - created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - Repeated headers are not supported but a header value can - contain commas. - - Cloud Tasks sets some headers to default values: - - - ``User-Agent``: By default, this header is - ``"AppEngine-Google; (+http://code.google.com/appengine)"``. - This header can be modified, but Cloud Tasks will append - ``"AppEngine-Google; (+http://code.google.com/appengine)"`` - to the modified ``User-Agent``. - - If the task has a - [body][google.cloud.tasks.v2beta3.AppEngineHttpRequest.body], - Cloud Tasks sets the following headers: - - - ``Content-Type``: By default, the ``Content-Type`` header - is set to ``"application/octet-stream"``. The default can - be overridden by explicitly setting ``Content-Type`` to a - particular media type when the [task is - created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - For example, ``Content-Type`` can be set to - ``"application/json"``. - - ``Content-Length``: This is computed by Cloud Tasks. This - value is output only. It cannot be changed. - - The headers below cannot be set or overridden: - - - ``Host`` - - ``X-Google-*`` - - ``X-AppEngine-*`` - - In addition, Cloud Tasks sets some headers when the task is - dispatched, such as headers containing information about the - task; see `request - headers `__. - These headers are set only when the task is dispatched, so - they are not visible when the task is returned in a Cloud - Tasks response. - - Although there is no specific limit for the maximum number - of headers or the size, there is a limit on the maximum size - of the [Task][google.cloud.tasks.v2beta3.Task]. For more - information, see the - [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] - documentation. 
- body (bytes): - HTTP request body. - - A request body is allowed only if the HTTP method is POST or - PUT. It is an error to set a body on a task with an - incompatible - [HttpMethod][google.cloud.tasks.v2beta3.HttpMethod]. - """ - - http_method = proto.Field( - proto.ENUM, - number=1, - enum='HttpMethod', - ) - app_engine_routing = proto.Field( - proto.MESSAGE, - number=2, - message='AppEngineRouting', - ) - relative_uri = proto.Field( - proto.STRING, - number=3, - ) - headers = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - body = proto.Field( - proto.BYTES, - number=5, - ) - - -class AppEngineRouting(proto.Message): - r"""App Engine Routing. - - Defines routing characteristics specific to App Engine - service, - version, and instance. - - For more information about services, versions, and instances see `An - Overview of App - Engine `__, - `Microservices Architecture on Google App - Engine `__, - `App Engine Standard request - routing `__, - and `App Engine Flex request - routing `__. - - Attributes: - service (str): - App service. - - By default, the task is sent to the service which is the - default service when the task is attempted. - - For some queues or tasks which were created using the App - Engine Task Queue API, - [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is - not parsable into - [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], - [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. - For example, some tasks which were created using the App - Engine SDK use a custom domain name; custom domains are not - parsed by Cloud Tasks. 
If - [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is - not parsable, then - [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], - [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance] - are the empty string. - version (str): - App version. - - By default, the task is sent to the version which is the - default version when the task is attempted. - - For some queues or tasks which were created using the App - Engine Task Queue API, - [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is - not parsable into - [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], - [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. - For example, some tasks which were created using the App - Engine SDK use a custom domain name; custom domains are not - parsed by Cloud Tasks. If - [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is - not parsable, then - [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], - [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance] - are the empty string. - instance (str): - App instance. - - By default, the task is sent to an instance which is - available when the task is attempted. - - Requests can only be sent to a specific instance if `manual - scaling is used in App Engine - Standard `__. - App Engine Flex does not support instances. For more - information, see `App Engine Standard request - routing `__ - and `App Engine Flex request - routing `__. - host (str): - Output only. The host that the task is sent to. 
- - The host is constructed from the domain name of the app - associated with the queue's project ID (for example - .appspot.com), and the - [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], - [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], - and - [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. - Tasks which were created using the App Engine SDK might have - a custom domain name. - - For more information, see `How Requests are - Routed `__. - """ - - service = proto.Field( - proto.STRING, - number=1, - ) - version = proto.Field( - proto.STRING, - number=2, - ) - instance = proto.Field( - proto.STRING, - number=3, - ) - host = proto.Field( - proto.STRING, - number=4, - ) - - -class OAuthToken(proto.Message): - r"""Contains information needed for generating an `OAuth - token `__. - This type of authorization should generally only be used when - calling Google APIs hosted on \*.googleapis.com. - - Attributes: - service_account_email (str): - `Service account - email `__ - to be used for generating OAuth token. The service account - must be within the same project as the queue. The caller - must have iam.serviceAccounts.actAs permission for the - service account. - scope (str): - OAuth scope to be used for generating OAuth - access token. If not specified, - "https://www.googleapis.com/auth/cloud-platform" - will be used. - """ - - service_account_email = proto.Field( - proto.STRING, - number=1, - ) - scope = proto.Field( - proto.STRING, - number=2, - ) - - -class OidcToken(proto.Message): - r"""Contains information needed for generating an `OpenID Connect - token `__. - This type of authorization can be used for many scenarios, including - calling Cloud Run, or endpoints where you intend to validate the - token yourself. - - Attributes: - service_account_email (str): - `Service account - email `__ - to be used for generating OIDC token. The service account - must be within the same project as the queue. 
The caller - must have iam.serviceAccounts.actAs permission for the - service account. - audience (str): - Audience to be used when generating OIDC - token. If not specified, the URI specified in - target will be used. - """ - - service_account_email = proto.Field( - proto.STRING, - number=1, - ) - audience = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/task.py b/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/task.py deleted file mode 100644 index 66293416..00000000 --- a/owl-bot-staging/v2beta3/google/cloud/tasks_v2beta3/types/task.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.tasks_v2beta3.types import target -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.tasks.v2beta3', - manifest={ - 'Task', - 'Attempt', - }, -) - - -class Task(proto.Message): - r"""A unit of scheduled work. - Attributes: - name (str): - Optionally caller-specified in - [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - - The task name. 
- - The task name must have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), colons (:), or periods (.). For - more information, see `Identifying - projects `__ - - ``LOCATION_ID`` is the canonical ID for the task's - location. The list of available locations can be obtained - by calling - [ListLocations][google.cloud.location.Locations.ListLocations]. - For more information, see - https://cloud.google.com/about/locations/. - - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), or hyphens (-). The maximum length is 100 - characters. - - ``TASK_ID`` can contain only letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), or underscores (_). The maximum - length is 500 characters. - app_engine_http_request (google.cloud.tasks_v2beta3.types.AppEngineHttpRequest): - HTTP request that is sent to the App Engine app handler. - - An App Engine task is a task that has - [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] - set. - http_request (google.cloud.tasks_v2beta3.types.HttpRequest): - HTTP request that is sent to the task's target. - - An HTTP task is a task that has - [HttpRequest][google.cloud.tasks.v2beta3.HttpRequest] set. - pull_message (google.cloud.tasks_v2beta3.types.PullMessage): - Pull Message contained in a task in a - [PULL][google.cloud.tasks.v2beta3.Queue.type] queue type. - This payload type cannot be explicitly set through Cloud - Tasks API. Its purpose, currently is to provide backward - compatibility with App Engine Task Queue - `pull `__ - queues to provide a way to inspect contents of pull tasks - through the - [CloudTasks.GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the task is scheduled to be attempted. - - For App Engine queues, this is when the task will be - attempted or retried. 
- - ``schedule_time`` will be truncated to the nearest - microsecond. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time that the task was created. - - ``create_time`` will be truncated to the nearest second. - dispatch_deadline (google.protobuf.duration_pb2.Duration): - The deadline for requests sent to the worker. If the worker - does not respond by this deadline then the request is - cancelled and the attempt is marked as a - ``DEADLINE_EXCEEDED`` failure. Cloud Tasks will retry the - task according to the - [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. - - Note that when the request is cancelled, Cloud Tasks will - stop listening for the response, but whether the worker - stops processing depends on the worker. For example, if the - worker is stuck, it may not react to cancelled requests. - - The default and maximum values depend on the type of - request: - - - For [HTTP tasks][google.cloud.tasks.v2beta3.HttpRequest], - the default is 10 minutes. The deadline must be in the - interval [15 seconds, 30 minutes]. - - - For [App Engine - tasks][google.cloud.tasks.v2beta3.AppEngineHttpRequest], - 0 indicates that the request has the default deadline. - The default deadline depends on the `scaling - type `__ - of the service: 10 minutes for standard apps with - automatic scaling, 24 hours for standard apps with manual - and basic scaling, and 60 minutes for flex apps. If the - request deadline is set, it must be in the interval [15 - seconds, 24 hours 15 seconds]. Regardless of the task's - ``dispatch_deadline``, the app handler will not run for - longer than than the service's timeout. We recommend - setting the ``dispatch_deadline`` to at most a few - seconds more than the app handler's timeout. For more - information see - `Timeouts `__. - - ``dispatch_deadline`` will be truncated to the nearest - millisecond. The deadline is an approximate deadline. - dispatch_count (int): - Output only. The number of attempts - dispatched. 
- This count includes attempts which have been - dispatched but haven't received a response. - response_count (int): - Output only. The number of attempts which - have received a response. - first_attempt (google.cloud.tasks_v2beta3.types.Attempt): - Output only. The status of the task's first attempt. - - Only - [dispatch_time][google.cloud.tasks.v2beta3.Attempt.dispatch_time] - will be set. The other - [Attempt][google.cloud.tasks.v2beta3.Attempt] information is - not retained by Cloud Tasks. - last_attempt (google.cloud.tasks_v2beta3.types.Attempt): - Output only. The status of the task's last - attempt. - view (google.cloud.tasks_v2beta3.types.Task.View): - Output only. The view specifies which subset of the - [Task][google.cloud.tasks.v2beta3.Task] has been returned. - """ - class View(proto.Enum): - r"""The view specifies a subset of - [Task][google.cloud.tasks.v2beta3.Task] data. - - When a task is returned in a response, not all information is - retrieved by default because some data, such as payloads, might be - desirable to return only when needed because of its large size or - because of the sensitivity of data that it contains. 
- """ - VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 - - name = proto.Field( - proto.STRING, - number=1, - ) - app_engine_http_request = proto.Field( - proto.MESSAGE, - number=3, - oneof='payload_type', - message=target.AppEngineHttpRequest, - ) - http_request = proto.Field( - proto.MESSAGE, - number=11, - oneof='payload_type', - message=target.HttpRequest, - ) - pull_message = proto.Field( - proto.MESSAGE, - number=13, - oneof='payload_type', - message=target.PullMessage, - ) - schedule_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - create_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - dispatch_deadline = proto.Field( - proto.MESSAGE, - number=12, - message=duration_pb2.Duration, - ) - dispatch_count = proto.Field( - proto.INT32, - number=6, - ) - response_count = proto.Field( - proto.INT32, - number=7, - ) - first_attempt = proto.Field( - proto.MESSAGE, - number=8, - message='Attempt', - ) - last_attempt = proto.Field( - proto.MESSAGE, - number=9, - message='Attempt', - ) - view = proto.Field( - proto.ENUM, - number=10, - enum=View, - ) - - -class Attempt(proto.Message): - r"""The status of a task attempt. - Attributes: - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time that this attempt was scheduled. - - ``schedule_time`` will be truncated to the nearest - microsecond. - dispatch_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time that this attempt was dispatched. - - ``dispatch_time`` will be truncated to the nearest - microsecond. - response_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time that this attempt response was - received. - - ``response_time`` will be truncated to the nearest - microsecond. - response_status (google.rpc.status_pb2.Status): - Output only. The response from the worker for this attempt. 
- - If ``response_time`` is unset, then the task has not been - attempted or is currently running and the - ``response_status`` field is meaningless. - """ - - schedule_time = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - dispatch_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - response_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - response_status = proto.Field( - proto.MESSAGE, - number=4, - message=status_pb2.Status, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2beta3/mypy.ini b/owl-bot-staging/v2beta3/mypy.ini deleted file mode 100644 index 4505b485..00000000 --- a/owl-bot-staging/v2beta3/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/owl-bot-staging/v2beta3/noxfile.py b/owl-bot-staging/v2beta3/noxfile.py deleted file mode 100644 index ab1166a7..00000000 --- a/owl-bot-staging/v2beta3/noxfile.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", -] - -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/tasks_v2beta3/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python='3.7') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=['3.6', '3.7']) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python='3.6') -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/owl-bot-staging/v2beta3/scripts/fixup_tasks_v2beta3_keywords.py b/owl-bot-staging/v2beta3/scripts/fixup_tasks_v2beta3_keywords.py deleted file mode 100644 index e9fe202e..00000000 --- a/owl-bot-staging/v2beta3/scripts/fixup_tasks_v2beta3_keywords.py +++ /dev/null @@ -1,191 +0,0 @@ -#! 
/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class tasksCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_queue': ('parent', 'queue', ), - 'create_task': ('parent', 'task', 'response_view', ), - 'delete_queue': ('name', ), - 'delete_task': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_queue': ('name', 'read_mask', ), - 'get_task': ('name', 'response_view', ), - 'list_queues': ('parent', 'filter', 'page_size', 'page_token', 'read_mask', ), - 'list_tasks': ('parent', 'response_view', 'page_size', 'page_token', ), - 'pause_queue': ('name', ), - 'purge_queue': ('name', ), - 'resume_queue': ('name', ), - 'run_task': ('name', 'response_view', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_queue': ('queue', 'update_mask', ), - } - - def leave_Call(self, 
original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=tasksCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. 
- tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the tasks client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2beta3/setup.py b/owl-bot-staging/v2beta3/setup.py deleted file mode 100644 index 2254eec1..00000000 --- a/owl-bot-staging/v2beta3/setup.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-tasks', - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', - 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff --git a/owl-bot-staging/v2beta3/tests/__init__.py b/owl-bot-staging/v2beta3/tests/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2beta3/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2beta3/tests/unit/__init__.py b/owl-bot-staging/v2beta3/tests/unit/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2beta3/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2beta3/tests/unit/gapic/__init__.py b/owl-bot-staging/v2beta3/tests/unit/gapic/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2beta3/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/__init__.py b/owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py b/owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py deleted file mode 100644 index fa0bc47b..00000000 --- a/owl-bot-staging/v2beta3/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py +++ /dev/null @@ -1,5211 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksAsyncClient -from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksClient -from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers -from google.cloud.tasks_v2beta3.services.cloud_tasks import transports -from google.cloud.tasks_v2beta3.services.cloud_tasks.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.tasks_v2beta3.types import cloudtasks -from google.cloud.tasks_v2beta3.types import queue -from google.cloud.tasks_v2beta3.types import queue as gct_queue -from google.cloud.tasks_v2beta3.types import target -from google.cloud.tasks_v2beta3.types import task -from google.cloud.tasks_v2beta3.types import task as gct_task -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete 
# TODO(busunkim): once google-auth >= 1.25.0 is required transitively through
# google-api-core, delete the "less than" test cases and these markers
# (make the ">= 1.25.0" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
    packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
    reason="This test requires google-auth < 1.25.0",
)
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
    packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
    reason="This test requires google-auth >= 1.25.0",
)

def client_cert_source_callback():
    """Dummy client-cert callback returning static (cert, key) bytes."""
    return b"cert bytes", b"key bytes"


def modify_default_endpoint(client):
    """Return a distinct host when the client's default endpoint is localhost.

    If the default endpoint were localhost, the default mTLS endpoint would be
    identical to it; patching in a different host lets the endpoint-switching
    tests observe a change.
    """
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT


def test__get_default_mtls_endpoint():
    # Exercise each endpoint shape the helper understands.
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert CloudTasksClient._get_default_mtls_endpoint(None) is None
    assert CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class", [
    CloudTasksClient,
    CloudTasksAsyncClient,
])
def test_cloud_tasks_client_from_service_account_info(client_class):
    # A client built from in-memory service-account info must carry the
    # credentials produced by the factory and the default host.
    fake_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = fake_creds
        client = client_class.from_service_account_info({"valid": True})
        assert client.transport._credentials == fake_creds
        assert isinstance(client, client_class)

        assert client.transport._host == 'cloudtasks.googleapis.com:443'


@pytest.mark.parametrize("client_class", [
    CloudTasksClient,
    CloudTasksAsyncClient,
])
def test_cloud_tasks_client_service_account_always_use_jwt(client_class):
    # Constructing a client from service-account creds must opt in to
    # self-signed JWT access.
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        sa_creds = service_account.Credentials(None, None, None)
        client_class(credentials=sa_creds)
        use_jwt.assert_called_with(True)


@pytest.mark.parametrize("client_class", [
    CloudTasksClient,
    CloudTasksAsyncClient,
])
def test_cloud_tasks_client_from_service_account_file(client_class):
    # Both the *_file and *_json constructors route through the same factory.
    fake_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = fake_creds
        for ctor in (client_class.from_service_account_file,
                     client_class.from_service_account_json):
            client = ctor("dummy/file/path.json")
            assert client.transport._credentials == fake_creds
            assert isinstance(client, client_class)

        assert client.transport._host == 'cloudtasks.googleapis.com:443'


def test_cloud_tasks_client_get_transport_class():
    # gRPC is the default transport, and it is also reachable by name.
    transport = CloudTasksClient.get_transport_class()
    assert transport in [transports.CloudTasksGrpcTransport]

    assert CloudTasksClient.get_transport_class("grpc") == transports.CloudTasksGrpcTransport
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"),
    (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"),
])
@mock.patch.object(CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient))
@mock.patch.object(CloudTasksAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksAsyncClient))
def test_cloud_tasks_client_client_options(client_class, transport_class, transport_name):
    """Verify how client_options and env vars drive transport construction."""

    def expected_kwargs(host, quota_project_id=None):
        # The exact keyword set the client forwards to the transport factory.
        return dict(
            credentials=None,
            credentials_file=None,
            host=host,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=quota_project_id,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )

    # A ready-made transport instance is used as-is: no factory lookup.
    with mock.patch.object(CloudTasksClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # A transport *name* goes through the factory.
    with mock.patch.object(CloudTasksClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # An explicit api_endpoint wins.
    opts = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as init:
        init.return_value = None
        client = client_class(client_options=opts)
        init.assert_called_once_with(**expected_kwargs("squid.clam.whelk"))

    # GOOGLE_API_USE_MTLS_ENDPOINT="never" -> plain default endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as init:
            init.return_value = None
            client = client_class()
            init.assert_called_once_with(**expected_kwargs(client.DEFAULT_ENDPOINT))

    # GOOGLE_API_USE_MTLS_ENDPOINT="always" -> mTLS endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as init:
            init.return_value = None
            client = client_class()
            init.assert_called_once_with(**expected_kwargs(client.DEFAULT_MTLS_ENDPOINT))

    # Unsupported GOOGLE_API_USE_MTLS_ENDPOINT values are rejected.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class()

    # Unsupported GOOGLE_API_USE_CLIENT_CERTIFICATE values are rejected.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError):
            client = client_class()

    # quota_project_id is forwarded verbatim.
    opts = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as init:
        init.return_value = None
        client = client_class(client_options=opts)
        init.assert_called_once_with(
            **expected_kwargs(client.DEFAULT_ENDPOINT, quota_project_id="octopus")
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "true"),
    (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio", "true"),
    (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "false"),
    (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio", "false"),
])
@mock.patch.object(CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient))
@mock.patch.object(CloudTasksAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_cloud_tasks_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """Endpoint autoswitch: under MTLS_ENDPOINT="auto" the mTLS endpoint is used
    only when GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" AND a cert exists."""
    cert_env = {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}

    # Case 1: client_cert_source supplied via client_options.
    with mock.patch.dict(os.environ, cert_env):
        opts = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as init:
            init.return_value = None
            client = client_class(client_options=opts)

            if use_client_cert_env == "false":
                expected_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            init.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
            )

    # Case 2: the cert comes from ADC.
    # (`client` below is still the instance from case 1 at the point the
    # expected values are computed; only class-level endpoint attrs are read.)
    with mock.patch.dict(os.environ, cert_env):
        with mock.patch.object(transport_class, '__init__') as init:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_cert_source = client_cert_source_callback

                    init.return_value = None
                    client = client_class()
                    init.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                    )

    # Case 3: no cert anywhere -> plain endpoint, no mTLS.
    with mock.patch.dict(os.environ, cert_env):
        with mock.patch.object(transport_class, '__init__') as init:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                init.return_value = None
                client = client_class()
                init.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                )


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"),
    (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_cloud_tasks_client_client_options_scopes(client_class, transport_class, transport_name):
    # Scopes from client_options are forwarded to the transport.
    opts = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, '__init__') as init:
        init.return_value = None
        client = client_class(client_options=opts)
        init.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"),
    (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_cloud_tasks_client_client_options_credentials_file(client_class, transport_class, transport_name):
    # A credentials file path from client_options is forwarded to the transport.
    opts = client_options.ClientOptions(
        credentials_file="credentials.json"
    )
    with mock.patch.object(transport_class, '__init__') as init:
        init.return_value = None
        client = client_class(client_options=opts)
        init.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
def test_cloud_tasks_client_client_options_from_dict():
    # client_options may be passed as a plain dict instead of ClientOptions.
    with mock.patch('google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__') as grpc_transport:
        grpc_transport.return_value = None
        client = CloudTasksClient(
            client_options={'api_endpoint': 'squid.clam.whelk'}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )


def test_list_queues(transport: str = 'grpc', request_type=cloudtasks.ListQueuesRequest):
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and the API itself is mocked out, so an empty request suffices.
    request = request_type()

    with mock.patch.object(
            type(client.transport.list_queues),
            '__call__') as call:
        call.return_value = cloudtasks.ListQueuesResponse(
            next_page_token='next_page_token_value',
        )
        response = client.list_queues(request)

        # The stub was invoked exactly once with the request we sent.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudtasks.ListQueuesRequest()

    # The response is wrapped in a pager carrying the token through.
    assert isinstance(response, pagers.ListQueuesPager)
    assert response.next_page_token == 'next_page_token_value'


def test_list_queues_from_dict():
    test_list_queues(request_type=dict)


def test_list_queues_empty_call():
    # Coverage failsafe: a call with request=None and no flattened fields
    # must still reach the stub with a default request.
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    with mock.patch.object(
            type(client.transport.list_queues),
            '__call__') as call:
        client.list_queues()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudtasks.ListQueuesRequest()


@pytest.mark.asyncio
async def test_list_queues_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ListQueuesRequest):
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request: the API is mocked and proto3 fields are all optional.
    request = request_type()

    with mock.patch.object(
            type(client.transport.list_queues),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloudtasks.ListQueuesResponse(
                next_page_token='next_page_token_value',
            )
        )
        response = await client.list_queues(request)

        # The stub was invoked with the request we sent.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudtasks.ListQueuesRequest()

    assert isinstance(response, pagers.ListQueuesAsyncPager)
    assert response.next_page_token == 'next_page_token_value'
@pytest.mark.asyncio
async def test_list_queues_async_from_dict():
    await test_list_queues_async(request_type=dict)


def test_list_queues_field_headers():
    """`parent` from the request must be echoed as an x-goog-request-params header."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set it to a non-empty value.
    request = cloudtasks.ListQueuesRequest()

    request.parent = 'parent/value'

    with mock.patch.object(
            type(client.transport.list_queues),
            '__call__') as call:
        call.return_value = cloudtasks.ListQueuesResponse()
        client.list_queues(request)

        # The stub saw exactly the request we sent ...
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

        # ... and the routing field header was attached.
        _, _, kw = call.mock_calls[0]
        assert (
            'x-goog-request-params',
            'parent=parent/value',
        ) in kw['metadata']


@pytest.mark.asyncio
async def test_list_queues_field_headers_async():
    """Async variant: the routing field header must be attached as well."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    request = cloudtasks.ListQueuesRequest()

    request.parent = 'parent/value'

    with mock.patch.object(
            type(client.transport.list_queues),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse())
        await client.list_queues(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

        _, _, kw = call.mock_calls[0]
        assert (
            'x-goog-request-params',
            'parent=parent/value',
        ) in kw['metadata']


def test_list_queues_flattened():
    """Flattened (keyword) arguments are folded into the request object."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
            type(client.transport.list_queues),
            '__call__') as call:
        call.return_value = cloudtasks.ListQueuesResponse()
        # Call with a truthy value for each flattened field.
        client.list_queues(
            parent='parent_value',
        )

        # The underlying call carries the flattened value in the request.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == 'parent_value'


def test_list_queues_flattened_error():
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Passing both a request object and flattened fields is an error.
    with pytest.raises(ValueError):
        client.list_queues(
            cloudtasks.ListQueuesRequest(),
            parent='parent_value',
        )


@pytest.mark.asyncio
async def test_list_queues_flattened_async():
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
            type(client.transport.list_queues),
            '__call__') as call:
        # FIX: the generated code first assigned a bare ListQueuesResponse to
        # call.return_value and immediately overwrote it with the awaitable
        # wrapper; the dead first assignment is removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListQueuesResponse())
        # Call with a truthy value for each flattened field.
        response = await client.list_queues(
            parent='parent_value',
        )

        # The underlying call carries the flattened value in the request.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == 'parent_value'


@pytest.mark.asyncio
async def test_list_queues_flattened_error_async():
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Passing both a request object and flattened fields is an error.
    with pytest.raises(ValueError):
        await client.list_queues(
            cloudtasks.ListQueuesRequest(),
            parent='parent_value',
        )
def _list_queues_page_responses():
    """Side-effect sequence shared by the pagination tests.

    Three pages of queues (3 + 0 + 1) followed by a final page of 2 and a
    sentinel RuntimeError that fires only if the pager over-fetches.
    """
    return (
        cloudtasks.ListQueuesResponse(
            queues=[
                queue.Queue(),
                queue.Queue(),
                queue.Queue(),
            ],
            next_page_token='abc',
        ),
        cloudtasks.ListQueuesResponse(
            queues=[],
            next_page_token='def',
        ),
        cloudtasks.ListQueuesResponse(
            queues=[
                queue.Queue(),
            ],
            next_page_token='ghi',
        ),
        cloudtasks.ListQueuesResponse(
            queues=[
                queue.Queue(),
                queue.Queue(),
            ],
        ),
        RuntimeError,
    )


def test_list_queues_pager():
    # FIX: generated code passed the AnonymousCredentials *class*; pass an
    # instance, consistent with every other test in this module.
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
            type(client.transport.list_queues),
            '__call__') as call:
        call.side_effect = _list_queues_page_responses()

        expected_metadata = tuple() + (
            gapic_v1.routing_header.to_grpc_metadata((
                ('parent', ''),
            )),
        )
        pager = client.list_queues(request={})

        # The pager carries the routing metadata for subsequent page fetches.
        assert pager._metadata == expected_metadata

        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, queue.Queue)
                   for i in results)

def test_list_queues_pages():
    # FIX: AnonymousCredentials instantiated (was the class in generated code).
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
            type(client.transport.list_queues),
            '__call__') as call:
        call.side_effect = _list_queues_page_responses()
        pages = list(client.list_queues(request={}).pages)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token

@pytest.mark.asyncio
async def test_list_queues_async_pager():
    # FIX: AnonymousCredentials instantiated (was the class in generated code).
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
            type(client.transport.list_queues),
            '__call__', new_callable=mock.AsyncMock) as call:
        call.side_effect = _list_queues_page_responses()
        async_pager = await client.list_queues(request={},)
        assert async_pager.next_page_token == 'abc'
        responses = []
        async for response in async_pager:
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, queue.Queue)
                   for i in responses)

@pytest.mark.asyncio
async def test_list_queues_async_pages():
    # FIX: AnonymousCredentials instantiated (was the class in generated code).
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
            type(client.transport.list_queues),
            '__call__', new_callable=mock.AsyncMock) as call:
        call.side_effect = _list_queues_page_responses()
        pages = []
        async for page_ in (await client.list_queues(request={})).pages:
            pages.append(page_)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token

def test_get_queue(transport: str = 'grpc', request_type=cloudtasks.GetQueueRequest):
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and the API itself is mocked out, so an empty request suffices.
    request = request_type()

    with mock.patch.object(
            type(client.transport.get_queue),
            '__call__') as call:
        call.return_value = queue.Queue(
            name='name_value',
            state=queue.Queue.State.RUNNING,
            type_=queue.Queue.Type.PULL,
            app_engine_http_queue=target.AppEngineHttpQueue(app_engine_routing_override=target.AppEngineRouting(service='service_value')),
        )
        response = client.get_queue(request)

        # The stub was invoked exactly once with the request we sent.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudtasks.GetQueueRequest()

    # The response surfaces the mocked queue fields.
    assert isinstance(response, queue.Queue)
    assert response.name == 'name_value'
    assert response.state == queue.Queue.State.RUNNING
    assert response.type_ == queue.Queue.Type.PULL
def test_get_queue_from_dict():
    test_get_queue(request_type=dict)


def test_get_queue_empty_call():
    # Coverage failsafe: a call with request=None and no flattened fields
    # must still reach the stub with a default request.
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    with mock.patch.object(
            type(client.transport.get_queue),
            '__call__') as call:
        client.get_queue()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudtasks.GetQueueRequest()


@pytest.mark.asyncio
async def test_get_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.GetQueueRequest):
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Empty request: the API is mocked and proto3 fields are all optional.
    request = request_type()

    with mock.patch.object(
            type(client.transport.get_queue),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue(
            name='name_value',
            state=queue.Queue.State.RUNNING,
            type_=queue.Queue.Type.PULL,
        ))
        response = await client.get_queue(request)

        # The stub was invoked with the request we sent.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloudtasks.GetQueueRequest()

    # The response surfaces the mocked queue fields.
    assert isinstance(response, queue.Queue)
    assert response.name == 'name_value'
    assert response.state == queue.Queue.State.RUNNING
    assert response.type_ == queue.Queue.Type.PULL


@pytest.mark.asyncio
async def test_get_queue_async_from_dict():
    await test_get_queue_async(request_type=dict)


def test_get_queue_field_headers():
    """`name` from the request must be echoed as an x-goog-request-params header."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set it to a non-empty value.
    request = cloudtasks.GetQueueRequest()

    request.name = 'name/value'

    with mock.patch.object(
            type(client.transport.get_queue),
            '__call__') as call:
        call.return_value = queue.Queue()
        client.get_queue(request)

        # The stub saw exactly the request we sent ...
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

        # ... and the routing field header was attached.
        _, _, kw = call.mock_calls[0]
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']


@pytest.mark.asyncio
async def test_get_queue_field_headers_async():
    """Async variant: the routing field header must be attached as well."""
    client = CloudTasksAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    request = cloudtasks.GetQueueRequest()

    request.name = 'name/value'

    with mock.patch.object(
            type(client.transport.get_queue),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
        await client.get_queue(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

        _, _, kw = call.mock_calls[0]
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']


def test_get_queue_flattened():
    """Flattened (keyword) arguments are folded into the request object."""
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
            type(client.transport.get_queue),
            '__call__') as call:
        call.return_value = queue.Queue()
        # Call with a truthy value for each flattened field.
        client.get_queue(
            name='name_value',
        )

        # The underlying call carries the flattened value in the request.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == 'name_value'


def test_get_queue_flattened_error():
    client = CloudTasksClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Passing both a request object and flattened fields is an error.
    with pytest.raises(ValueError):
        client.get_queue(
            cloudtasks.GetQueueRequest(),
            name='name_value',
        )
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_queue( - cloudtasks.GetQueueRequest(), - name='name_value', - ) - - -def test_create_queue(transport: str = 'grpc', request_type=cloudtasks.CreateQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue( - name='name_value', - state=gct_queue.Queue.State.RUNNING, - type_=gct_queue.Queue.Type.PULL, - app_engine_http_queue=target.AppEngineHttpQueue(app_engine_routing_override=target.AppEngineRouting(service='service_value')), - ) - response = client.create_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateQueueRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gct_queue.Queue) - assert response.name == 'name_value' - assert response.state == gct_queue.Queue.State.RUNNING - assert response.type_ == gct_queue.Queue.Type.PULL - - -def test_create_queue_from_dict(): - test_create_queue(request_type=dict) - - -def test_create_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - client.create_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateQueueRequest() - - -@pytest.mark.asyncio -async def test_create_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CreateQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue( - name='name_value', - state=gct_queue.Queue.State.RUNNING, - type_=gct_queue.Queue.Type.PULL, - )) - response = await client.create_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateQueueRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gct_queue.Queue) - assert response.name == 'name_value' - assert response.state == gct_queue.Queue.State.RUNNING - assert response.type_ == gct_queue.Queue.Type.PULL - - -@pytest.mark.asyncio -async def test_create_queue_async_from_dict(): - await test_create_queue_async(request_type=dict) - - -def test_create_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.CreateQueueRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - call.return_value = gct_queue.Queue() - client.create_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.CreateQueueRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) - await client.create_queue(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_queue( - parent='parent_value', - queue=gct_queue.Queue(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].queue == gct_queue.Queue(name='name_value') - - -def test_create_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_queue( - cloudtasks.CreateQueueRequest(), - parent='parent_value', - queue=gct_queue.Queue(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_queue), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = gct_queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_queue( - parent='parent_value', - queue=gct_queue.Queue(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].queue == gct_queue.Queue(name='name_value') - - -@pytest.mark.asyncio -async def test_create_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_queue( - cloudtasks.CreateQueueRequest(), - parent='parent_value', - queue=gct_queue.Queue(name='name_value'), - ) - - -def test_update_queue(transport: str = 'grpc', request_type=cloudtasks.UpdateQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = gct_queue.Queue( - name='name_value', - state=gct_queue.Queue.State.RUNNING, - type_=gct_queue.Queue.Type.PULL, - app_engine_http_queue=target.AppEngineHttpQueue(app_engine_routing_override=target.AppEngineRouting(service='service_value')), - ) - response = client.update_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.UpdateQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gct_queue.Queue) - assert response.name == 'name_value' - assert response.state == gct_queue.Queue.State.RUNNING - assert response.type_ == gct_queue.Queue.Type.PULL - - -def test_update_queue_from_dict(): - test_update_queue(request_type=dict) - - -def test_update_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - client.update_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.UpdateQueueRequest() - - -@pytest.mark.asyncio -async def test_update_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.UpdateQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue( - name='name_value', - state=gct_queue.Queue.State.RUNNING, - type_=gct_queue.Queue.Type.PULL, - )) - response = await client.update_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.UpdateQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gct_queue.Queue) - assert response.name == 'name_value' - assert response.state == gct_queue.Queue.State.RUNNING - assert response.type_ == gct_queue.Queue.Type.PULL - - -@pytest.mark.asyncio -async def test_update_queue_async_from_dict(): - await test_update_queue_async(request_type=dict) - - -def test_update_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.UpdateQueueRequest() - - request.queue.name = 'queue.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - call.return_value = gct_queue.Queue() - client.update_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'queue.name=queue.name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.UpdateQueueRequest() - - request.queue.name = 'queue.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) - await client.update_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'queue.name=queue.name/value', - ) in kw['metadata'] - - -def test_update_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_queue( - queue=gct_queue.Queue(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].queue == gct_queue.Queue(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -def test_update_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_queue( - cloudtasks.UpdateQueueRequest(), - queue=gct_queue.Queue(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.asyncio -async def test_update_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_queue( - queue=gct_queue.Queue(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].queue == gct_queue.Queue(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -@pytest.mark.asyncio -async def test_update_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_queue( - cloudtasks.UpdateQueueRequest(), - queue=gct_queue.Queue(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_queue(transport: str = 'grpc', request_type=cloudtasks.DeleteQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteQueueRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_queue_from_dict(): - test_delete_queue(request_type=dict) - - -def test_delete_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - client.delete_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteQueueRequest() - - -@pytest.mark.asyncio -async def test_delete_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.DeleteQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteQueueRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_queue_async_from_dict(): - await test_delete_queue_async(request_type=dict) - - -def test_delete_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.DeleteQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - call.return_value = None - client.delete_queue(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.DeleteQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_queue( - cloudtasks.DeleteQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_queue( - cloudtasks.DeleteQueueRequest(), - name='name_value', - ) - - -def test_purge_queue(transport: str = 'grpc', request_type=cloudtasks.PurgeQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - type_=queue.Queue.Type.PULL, - app_engine_http_queue=target.AppEngineHttpQueue(app_engine_routing_override=target.AppEngineRouting(service='service_value')), - ) - response = client.purge_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PurgeQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - assert response.type_ == queue.Queue.Type.PULL - - -def test_purge_queue_from_dict(): - test_purge_queue(request_type=dict) - - -def test_purge_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - client.purge_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PurgeQueueRequest() - - -@pytest.mark.asyncio -async def test_purge_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.PurgeQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - type_=queue.Queue.Type.PULL, - )) - response = await client.purge_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PurgeQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - assert response.type_ == queue.Queue.Type.PULL - - -@pytest.mark.asyncio -async def test_purge_queue_async_from_dict(): - await test_purge_queue_async(request_type=dict) - - -def test_purge_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudtasks.PurgeQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - call.return_value = queue.Queue() - client.purge_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_purge_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.PurgeQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - await client.purge_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_purge_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.purge_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_purge_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.purge_queue( - cloudtasks.PurgeQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_purge_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.purge_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.purge_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_purge_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.purge_queue( - cloudtasks.PurgeQueueRequest(), - name='name_value', - ) - - -def test_pause_queue(transport: str = 'grpc', request_type=cloudtasks.PauseQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - type_=queue.Queue.Type.PULL, - app_engine_http_queue=target.AppEngineHttpQueue(app_engine_routing_override=target.AppEngineRouting(service='service_value')), - ) - response = client.pause_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PauseQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - assert response.type_ == queue.Queue.Type.PULL - - -def test_pause_queue_from_dict(): - test_pause_queue(request_type=dict) - - -def test_pause_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - client.pause_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PauseQueueRequest() - - -@pytest.mark.asyncio -async def test_pause_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.PauseQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - type_=queue.Queue.Type.PULL, - )) - response = await client.pause_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.PauseQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - assert response.type_ == queue.Queue.Type.PULL - - -@pytest.mark.asyncio -async def test_pause_queue_async_from_dict(): - await test_pause_queue_async(request_type=dict) - - -def test_pause_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudtasks.PauseQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - call.return_value = queue.Queue() - client.pause_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_pause_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.PauseQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - await client.pause_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_pause_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.pause_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_pause_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.pause_queue( - cloudtasks.PauseQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_pause_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pause_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.pause_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_pause_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.pause_queue( - cloudtasks.PauseQueueRequest(), - name='name_value', - ) - - -def test_resume_queue(transport: str = 'grpc', request_type=cloudtasks.ResumeQueueRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - type_=queue.Queue.Type.PULL, - app_engine_http_queue=target.AppEngineHttpQueue(app_engine_routing_override=target.AppEngineRouting(service='service_value')), - ) - response = client.resume_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ResumeQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - assert response.type_ == queue.Queue.Type.PULL - - -def test_resume_queue_from_dict(): - test_resume_queue(request_type=dict) - - -def test_resume_queue_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - client.resume_queue() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ResumeQueueRequest() - - -@pytest.mark.asyncio -async def test_resume_queue_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ResumeQueueRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue( - name='name_value', - state=queue.Queue.State.RUNNING, - type_=queue.Queue.Type.PULL, - )) - response = await client.resume_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ResumeQueueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, queue.Queue) - assert response.name == 'name_value' - assert response.state == queue.Queue.State.RUNNING - assert response.type_ == queue.Queue.Type.PULL - - -@pytest.mark.asyncio -async def test_resume_queue_async_from_dict(): - await test_resume_queue_async(request_type=dict) - - -def test_resume_queue_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudtasks.ResumeQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - call.return_value = queue.Queue() - client.resume_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_resume_queue_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ResumeQueueRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - await client.resume_queue(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_resume_queue_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = queue.Queue() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.resume_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_resume_queue_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.resume_queue( - cloudtasks.ResumeQueueRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_resume_queue_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resume_queue), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = queue.Queue() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.resume_queue( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_resume_queue_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.resume_queue( - cloudtasks.ResumeQueueRequest(), - name='name_value', - ) - - -def test_get_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_from_dict(): - test_get_iam_policy(request_type=dict) - - -def test_get_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) - - -def test_get_iam_policy_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -def test_get_iam_policy_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_set_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_from_dict(): - test_set_iam_policy(request_type=dict) - - -def test_set_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - - -def test_set_iam_policy_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -def test_set_iam_policy_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_test_iam_permissions(transport: str = 'grpc', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_from_dict(): - test_test_iam_permissions(request_type=dict) - - -def test_test_iam_permissions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) - - -def test_test_iam_permissions_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_test_iam_permissions_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - assert args[0].permissions == ['permissions_value'] - - -def test_test_iam_permissions_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -@pytest.mark.asyncio -async def test_test_iam_permissions_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - assert args[0].permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -def test_list_tasks(transport: str = 'grpc', request_type=cloudtasks.ListTasksRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListTasksResponse( - next_page_token='next_page_token_value', - ) - response = client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListTasksRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTasksPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_tasks_from_dict(): - test_list_tasks(request_type=dict) - - -def test_list_tasks_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - client.list_tasks() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListTasksRequest() - - -@pytest.mark.asyncio -async def test_list_tasks_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.ListTasksRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.ListTasksRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTasksAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_tasks_async_from_dict(): - await test_list_tasks_async(request_type=dict) - - -def test_list_tasks_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ListTasksRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = cloudtasks.ListTasksResponse() - client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_tasks_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.ListTasksRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse()) - await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_tasks_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListTasksResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_tasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_tasks_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_tasks( - cloudtasks.ListTasksRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_tasks_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = cloudtasks.ListTasksResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudtasks.ListTasksResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_tasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_tasks_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_tasks( - cloudtasks.ListTasksRequest(), - parent='parent_value', - ) - - -def test_list_tasks_pager(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - cloudtasks.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_tasks(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, task.Task) - for i in results) - -def test_list_tasks_pages(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - cloudtasks.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tasks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_tasks_async_pager(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - cloudtasks.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tasks(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, task.Task) - for i in responses) - -@pytest.mark.asyncio -async def test_list_tasks_async_pages(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - cloudtasks.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - cloudtasks.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_tasks(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_task(transport: str = 'grpc', request_type=cloudtasks.GetTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task( - name='name_value', - dispatch_count=1496, - response_count=1527, - view=task.Task.View.BASIC, - app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), - ) - response = client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetTaskRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.dispatch_count == 1496 - assert response.response_count == 1527 - assert response.view == task.Task.View.BASIC - - -def test_get_task_from_dict(): - test_get_task(request_type=dict) - - -def test_get_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - client.get_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetTaskRequest() - - -@pytest.mark.asyncio -async def test_get_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.GetTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( - name='name_value', - dispatch_count=1496, - response_count=1527, - view=task.Task.View.BASIC, - )) - response = await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.GetTaskRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.dispatch_count == 1496 - assert response.response_count == 1527 - assert response.view == task.Task.View.BASIC - - -@pytest.mark.asyncio -async def test_get_task_async_from_dict(): - await test_get_task_async(request_type=dict) - - -def test_get_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.GetTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = task.Task() - client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.GetTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_task( - cloudtasks.GetTaskRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_task( - cloudtasks.GetTaskRequest(), - name='name_value', - ) - - -def test_create_task(transport: str = 'grpc', request_type=cloudtasks.CreateTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_task.Task( - name='name_value', - dispatch_count=1496, - response_count=1527, - view=gct_task.Task.View.BASIC, - app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), - ) - response = client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateTaskRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gct_task.Task) - assert response.name == 'name_value' - assert response.dispatch_count == 1496 - assert response.response_count == 1527 - assert response.view == gct_task.Task.View.BASIC - - -def test_create_task_from_dict(): - test_create_task(request_type=dict) - - -def test_create_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - client.create_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateTaskRequest() - - -@pytest.mark.asyncio -async def test_create_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.CreateTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task( - name='name_value', - dispatch_count=1496, - response_count=1527, - view=gct_task.Task.View.BASIC, - )) - response = await client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.CreateTaskRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gct_task.Task) - assert response.name == 'name_value' - assert response.dispatch_count == 1496 - assert response.response_count == 1527 - assert response.view == gct_task.Task.View.BASIC - - -@pytest.mark.asyncio -async def test_create_task_async_from_dict(): - await test_create_task_async(request_type=dict) - - -def test_create_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.CreateTaskRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value = gct_task.Task() - client.create_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.CreateTaskRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) - await client.create_task(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gct_task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_task( - parent='parent_value', - task=gct_task.Task(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].task == gct_task.Task(name='name_value') - - -def test_create_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_task( - cloudtasks.CreateTaskRequest(), - parent='parent_value', - task=gct_task.Task(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_task), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = gct_task.Task() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_task( - parent='parent_value', - task=gct_task.Task(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].task == gct_task.Task(name='name_value') - - -@pytest.mark.asyncio -async def test_create_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_task( - cloudtasks.CreateTaskRequest(), - parent='parent_value', - task=gct_task.Task(name='name_value'), - ) - - -def test_delete_task(transport: str = 'grpc', request_type=cloudtasks.DeleteTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteTaskRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_task_from_dict(): - test_delete_task(request_type=dict) - - -def test_delete_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - client.delete_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteTaskRequest() - - -@pytest.mark.asyncio -async def test_delete_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.DeleteTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.DeleteTaskRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_delete_task_async_from_dict(): - await test_delete_task_async(request_type=dict) - - -def test_delete_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.DeleteTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value = None - client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.DeleteTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_task( - cloudtasks.DeleteTaskRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_task( - cloudtasks.DeleteTaskRequest(), - name='name_value', - ) - - -def test_run_task(transport: str = 'grpc', request_type=cloudtasks.RunTaskRequest): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task( - name='name_value', - dispatch_count=1496, - response_count=1527, - view=task.Task.View.BASIC, - app_engine_http_request=target.AppEngineHttpRequest(http_method=target.HttpMethod.POST), - ) - response = client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.RunTaskRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.dispatch_count == 1496 - assert response.response_count == 1527 - assert response.view == task.Task.View.BASIC - - -def test_run_task_from_dict(): - test_run_task(request_type=dict) - - -def test_run_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - client.run_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.RunTaskRequest() - - -@pytest.mark.asyncio -async def test_run_task_async(transport: str = 'grpc_asyncio', request_type=cloudtasks.RunTaskRequest): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(task.Task( - name='name_value', - dispatch_count=1496, - response_count=1527, - view=task.Task.View.BASIC, - )) - response = await client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == cloudtasks.RunTaskRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, task.Task) - assert response.name == 'name_value' - assert response.dispatch_count == 1496 - assert response.response_count == 1527 - assert response.view == task.Task.View.BASIC - - -@pytest.mark.asyncio -async def test_run_task_async_from_dict(): - await test_run_task_async(request_type=dict) - - -def test_run_task_field_headers(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.RunTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - call.return_value = task.Task() - client.run_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_run_task_field_headers_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudtasks.RunTaskRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - await client.run_task(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_run_task_flattened(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.run_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_run_task_flattened_error(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.run_task( - cloudtasks.RunTaskRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_run_task_flattened_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.run_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_run_task_flattened_error_async(): - client = CloudTasksAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.run_task( - cloudtasks.RunTaskRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudTasksClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudTasksClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CloudTasksClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudTasksGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.CloudTasksGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.CloudTasksGrpcTransport, - transports.CloudTasksGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CloudTasksGrpcTransport, - ) - -def test_cloud_tasks_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudTasksTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_cloud_tasks_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.CloudTasksTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_queues', - 'get_queue', - 'create_queue', - 'update_queue', - 'delete_queue', - 'purge_queue', - 'pause_queue', - 'resume_queue', - 'get_iam_policy', - 'set_iam_policy', - 'test_iam_permissions', - 'list_tasks', - 'get_task', - 'create_task', - 'delete_task', - 'run_task', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_cloud_tasks_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudTasksTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_cloud_tasks_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - 
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudTasksTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - quota_project_id="octopus", - ) - - -def test_cloud_tasks_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudTasksTransport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_cloud_tasks_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudTasksClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_cloud_tasks_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudTasksClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudTasksGrpcTransport, - transports.CloudTasksGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_cloud_tasks_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudTasksGrpcTransport, - transports.CloudTasksGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_cloud_tasks_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudTasksGrpcTransport, grpc_helpers), - (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "cloudtasks.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="cloudtasks.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) -def test_cloud_tasks_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_cloud_tasks_host_no_port(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudtasks.googleapis.com'), - ) - assert client.transport._host == 'cloudtasks.googleapis.com:443' - - -def test_cloud_tasks_host_with_port(): - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudtasks.googleapis.com:8000'), - ) - assert client.transport._host == 'cloudtasks.googleapis.com:8000' - -def test_cloud_tasks_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.CloudTasksGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_cloud_tasks_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudTasksGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) -def test_cloud_tasks_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - 
credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport]) -def test_cloud_tasks_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_queue_path(): - project = "squid" - location = "clam" - queue = "whelk" - expected = "projects/{project}/locations/{location}/queues/{queue}".format(project=project, location=location, queue=queue, ) - actual = 
CloudTasksClient.queue_path(project, location, queue) - assert expected == actual - - -def test_parse_queue_path(): - expected = { - "project": "octopus", - "location": "oyster", - "queue": "nudibranch", - } - path = CloudTasksClient.queue_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_queue_path(path) - assert expected == actual - -def test_task_path(): - project = "cuttlefish" - location = "mussel" - queue = "winkle" - task = "nautilus" - expected = "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(project=project, location=location, queue=queue, task=task, ) - actual = CloudTasksClient.task_path(project, location, queue, task) - assert expected == actual - - -def test_parse_task_path(): - expected = { - "project": "scallop", - "location": "abalone", - "queue": "squid", - "task": "clam", - } - path = CloudTasksClient.task_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_task_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = CloudTasksClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = CloudTasksClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudTasksClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = CloudTasksClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = CloudTasksClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = CloudTasksClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = CloudTasksClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = CloudTasksClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = CloudTasksClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudTasksClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = CloudTasksClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = CloudTasksClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = CloudTasksClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.CloudTasksTransport, '_prep_wrapped_messages') as prep: - client = CloudTasksClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.CloudTasksTransport, '_prep_wrapped_messages') as prep: - transport_class = CloudTasksClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/tasks_v2/test_cloud_tasks.py b/tests/unit/gapic/tasks_v2/test_cloud_tasks.py index f2f3cda1..c1982ca9 100644 --- a/tests/unit/gapic/tasks_v2/test_cloud_tasks.py +++ b/tests/unit/gapic/tasks_v2/test_cloud_tasks.py @@ -35,7 +35,6 @@ from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksClient from google.cloud.tasks_v2.services.cloud_tasks import pagers from google.cloud.tasks_v2.services.cloud_tasks import transports -from google.cloud.tasks_v2.services.cloud_tasks.transports.base import _API_CORE_VERSION from google.cloud.tasks_v2.services.cloud_tasks.transports.base import ( _GOOGLE_AUTH_VERSION, ) @@ -58,8 +57,9 
@@ import google.auth -# TODO(busunkim): Once google-api-core >= 1.26.0 is required: -# - Delete all the api-core and auth "less than" test cases +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases # - Delete these pytest markers (Make the "greater than or equal to" tests the default). requires_google_auth_lt_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), @@ -70,16 +70,6 @@ reason="This test requires google-auth >= 1.25.0", ) -requires_api_core_lt_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), - reason="This test requires google-api-core < 1.26.0", -) - -requires_api_core_gte_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), - reason="This test requires google-api-core >= 1.26.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -137,6 +127,16 @@ def test_cloud_tasks_client_from_service_account_info(client_class): assert client.transport._host == "cloudtasks.googleapis.com:443" +@pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient,]) +def test_cloud_tasks_client_service_account_always_use_jwt(client_class): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient,]) def test_cloud_tasks_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -4186,7 +4186,6 @@ def test_cloud_tasks_transport_auth_adc_old_google_auth(transport_class): (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async), ], ) 
-@requires_api_core_gte_1_26_0 def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -4215,79 +4214,6 @@ def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): ) -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudTasksGrpcTransport, grpc_helpers), - (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_tasks_transport_create_channel_old_api_core( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus") - - create_channel.assert_called_with( - "cloudtasks.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudTasksGrpcTransport, grpc_helpers), - (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_tasks_transport_create_channel_user_scopes( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "cloudtasks.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=["1", "2"], - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport], diff --git a/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py b/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py index 8a11b7fb..d1a9bb05 100644 --- a/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py +++ b/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py @@ -35,9 +35,6 @@ from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksClient from google.cloud.tasks_v2beta2.services.cloud_tasks import pagers from google.cloud.tasks_v2beta2.services.cloud_tasks import transports -from google.cloud.tasks_v2beta2.services.cloud_tasks.transports.base import ( - _API_CORE_VERSION, -) from google.cloud.tasks_v2beta2.services.cloud_tasks.transports.base import ( _GOOGLE_AUTH_VERSION, ) @@ -60,8 +57,9 @@ import google.auth -# TODO(busunkim): Once google-api-core >= 1.26.0 is required: -# - Delete all the api-core and auth "less than" test cases +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases # - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
requires_google_auth_lt_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), @@ -72,16 +70,6 @@ reason="This test requires google-auth >= 1.25.0", ) -requires_api_core_lt_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), - reason="This test requires google-api-core < 1.26.0", -) - -requires_api_core_gte_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), - reason="This test requires google-api-core >= 1.26.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -139,6 +127,16 @@ def test_cloud_tasks_client_from_service_account_info(client_class): assert client.transport._host == "cloudtasks.googleapis.com:443" +@pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient,]) +def test_cloud_tasks_client_service_account_always_use_jwt(client_class): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient,]) def test_cloud_tasks_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -3906,7 +3904,9 @@ def test_lease_tasks_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" - # assert args[0].lease_duration == duration_pb2.Duration(seconds=751) + assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration( + seconds=751 + ) def test_lease_tasks_flattened_error(): @@ -3945,7 +3945,9 @@ async def test_lease_tasks_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" - # assert 
args[0].lease_duration == duration_pb2.Duration(seconds=751) + assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration( + seconds=751 + ) @pytest.mark.asyncio @@ -4106,7 +4108,9 @@ def test_acknowledge_task_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" - # assert args[0].schedule_time == timestamp_pb2.Timestamp(seconds=751) + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) def test_acknowledge_task_flattened_error(): @@ -4143,7 +4147,9 @@ async def test_acknowledge_task_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" - # assert args[0].schedule_time == timestamp_pb2.Timestamp(seconds=751) + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) @pytest.mark.asyncio @@ -4318,8 +4324,12 @@ def test_renew_lease_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" - # assert args[0].schedule_time == timestamp_pb2.Timestamp(seconds=751) - # assert args[0].lease_duration == duration_pb2.Duration(seconds=751) + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) + assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration( + seconds=751 + ) def test_renew_lease_flattened_error(): @@ -4359,8 +4369,12 @@ async def test_renew_lease_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" - # assert args[0].schedule_time == timestamp_pb2.Timestamp(seconds=751) - # assert args[0].lease_duration == duration_pb2.Duration(seconds=751) + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) + assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration( + seconds=751 + ) 
@pytest.mark.asyncio @@ -4534,7 +4548,9 @@ def test_cancel_lease_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" - # assert args[0].schedule_time == timestamp_pb2.Timestamp(seconds=751) + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) def test_cancel_lease_flattened_error(): @@ -4571,7 +4587,9 @@ async def test_cancel_lease_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" - # assert args[0].schedule_time == timestamp_pb2.Timestamp(seconds=751) + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) @pytest.mark.asyncio @@ -5025,7 +5043,6 @@ def test_cloud_tasks_transport_auth_adc_old_google_auth(transport_class): (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async), ], ) -@requires_api_core_gte_1_26_0 def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -5054,79 +5071,6 @@ def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): ) -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudTasksGrpcTransport, grpc_helpers), - (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_tasks_transport_create_channel_old_api_core( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus") - - create_channel.assert_called_with( - "cloudtasks.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudTasksGrpcTransport, grpc_helpers), - (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_tasks_transport_create_channel_user_scopes( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "cloudtasks.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=["1", "2"], - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport], diff --git a/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py b/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py index 185aa41c..6463da50 100644 --- a/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py +++ b/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py @@ -35,9 +35,6 @@ from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksClient from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers from google.cloud.tasks_v2beta3.services.cloud_tasks import transports -from google.cloud.tasks_v2beta3.services.cloud_tasks.transports.base import ( - _API_CORE_VERSION, -) from google.cloud.tasks_v2beta3.services.cloud_tasks.transports.base import ( _GOOGLE_AUTH_VERSION, ) @@ -60,8 +57,9 @@ import google.auth -# TODO(busunkim): Once google-api-core >= 1.26.0 is required: -# - Delete all the api-core and auth "less than" test cases +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases # - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
requires_google_auth_lt_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), @@ -72,16 +70,6 @@ reason="This test requires google-auth >= 1.25.0", ) -requires_api_core_lt_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), - reason="This test requires google-api-core < 1.26.0", -) - -requires_api_core_gte_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), - reason="This test requires google-api-core >= 1.26.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -139,6 +127,16 @@ def test_cloud_tasks_client_from_service_account_info(client_class): assert client.transport._host == "cloudtasks.googleapis.com:443" +@pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient,]) +def test_cloud_tasks_client_service_account_always_use_jwt(client_class): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient,]) def test_cloud_tasks_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -4266,7 +4264,6 @@ def test_cloud_tasks_transport_auth_adc_old_google_auth(transport_class): (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async), ], ) -@requires_api_core_gte_1_26_0 def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. 
@@ -4295,79 +4292,6 @@ def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): ) -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudTasksGrpcTransport, grpc_helpers), - (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_tasks_transport_create_channel_old_api_core( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus") - - create_channel.assert_called_with( - "cloudtasks.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudTasksGrpcTransport, grpc_helpers), - (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -@requires_api_core_lt_1_26_0 -def test_cloud_tasks_transport_create_channel_user_scopes( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "cloudtasks.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=["1", "2"], - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "transport_class", [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport], From e3881f300a00a1c01ae3f6557386b13e75e146eb Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Tue, 22 Jun 2021 23:02:17 +0000 Subject: [PATCH 3/4] fix: require google-api-core>=1.26.0 --- setup.py | 2 +- testing/constraints-3.6.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index af750040..caf1a890 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,7 @@ version = "2.3.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", + "google-api-core[grpc] >= 1.26.0, <2.0.0dev"," "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", "proto-plus >= 0.4.0", "packaging >= 14.3", diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index ac3ae21c..c4ecfd63 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.22.2 +google-api-core==1.26.0 grpc-google-iam-v1==0.12.3 proto-plus==0.4.0 libcst==0.2.5 From 6bd435fe490bb629015c9aac2193e0cf1a2f7c9b Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Tue, 22 Jun 2021 23:11:25 +0000 Subject: [PATCH 4/4] fix: fix typo in setup.py --- 
setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index caf1a890..728bcbac 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,7 @@ version = "2.3.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.26.0, <2.0.0dev"," + "google-api-core[grpc] >= 1.26.0, <2.0.0dev", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", "proto-plus >= 0.4.0", "packaging >= 14.3",