From 18e146cab5e2b669538ca6b1d58603e72d58ae88 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 2 Sep 2020 12:54:12 -0700 Subject: [PATCH] feat!: migrate to use microgen (#38) * feat!: migrate to use microgen * Update UPGRADING.md Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- .coveragerc | 13 +- .../__init__.py => .github/snippet-bot.yml | 0 README.rst | 6 +- UPGRADING.md | 157 + docs/UPGRADING.md | 1 + docs/gapic/v2/api.rst | 6 - docs/gapic/v2/types.rst | 5 - docs/gapic/v2beta2/api.rst | 6 - docs/gapic/v2beta2/types.rst | 5 - docs/gapic/v2beta3/api.rst | 6 - docs/gapic/v2beta3/types.rst | 5 - docs/index.rst | 22 +- docs/tasks_v2/services.rst | 6 + docs/tasks_v2/types.rst | 5 + docs/tasks_v2beta2/services.rst | 6 + docs/tasks_v2beta2/types.rst | 5 + docs/tasks_v2beta3/services.rst | 6 + docs/tasks_v2beta3/types.rst | 5 + google/cloud/tasks/__init__.py | 80 + google/cloud/tasks/py.typed | 2 + google/cloud/tasks_v2/__init__.py | 84 +- .../tasks_v2/gapic/cloud_tasks_client.py | 1682 ------ .../gapic/cloud_tasks_client_config.py | 122 - google/cloud/tasks_v2/gapic/enums.py | 113 - .../tasks_v2/gapic/transports/__init__.py | 0 .../transports/cloud_tasks_grpc_transport.py | 428 -- google/cloud/tasks_v2/proto/__init__.py | 0 google/cloud/tasks_v2/proto/cloudtasks_pb2.py | 1611 ------ .../tasks_v2/proto/cloudtasks_pb2_grpc.py | 880 --- google/cloud/tasks_v2/proto/queue_pb2.py | 799 --- google/cloud/tasks_v2/proto/queue_pb2_grpc.py | 3 - google/cloud/tasks_v2/proto/target_pb2.py | 1147 ---- .../cloud/tasks_v2/proto/target_pb2_grpc.py | 3 - google/cloud/tasks_v2/proto/task_pb2.py | 604 -- google/cloud/tasks_v2/proto/task_pb2_grpc.py | 3 - google/cloud/tasks_v2/py.typed | 2 + google/cloud/tasks_v2/services/__init__.py | 16 + .../services/cloud_tasks}/__init__.py | 18 +- .../services/cloud_tasks/async_client.py | 1729 ++++++ .../tasks_v2/services/cloud_tasks/client.py | 1837 ++++++ .../tasks_v2/services/cloud_tasks/pagers.py | 278 + .../cloud_tasks/transports/__init__.py | 36 + .../services/cloud_tasks/transports/base.py | 397 ++ .../services/cloud_tasks/transports/grpc.py | 755 +++ .../cloud_tasks/transports/grpc_asyncio.py | 767 +++ google/cloud/tasks_v2/types.py | 72 - google/cloud/tasks_v2/types/__init__.py | 81 + google/cloud/tasks_v2/types/cloudtasks.py | 478 ++ google/cloud/tasks_v2/types/queue.py | 394 ++ google/cloud/tasks_v2/types/target.py | 504 ++ google/cloud/tasks_v2/types/task.py | 225 + google/cloud/tasks_v2beta2/__init__.py | 94 +- google/cloud/tasks_v2beta2/gapic/__init__.py | 0 .../tasks_v2beta2/gapic/cloud_tasks_client.py | 2147 ------- .../gapic/cloud_tasks_client_config.py | 142 - google/cloud/tasks_v2beta2/gapic/enums.py | 112 - .../gapic/transports/__init__.py | 0 .../transports/cloud_tasks_grpc_transport.py | 512 -- google/cloud/tasks_v2beta2/proto/__init__.py | 0 .../tasks_v2beta2/proto/cloudtasks_pb2.py | 2310 -------- .../proto/cloudtasks_pb2_grpc.py | 1103 ---- google/cloud/tasks_v2beta2/proto/queue_pb2.py | 801 --- .../tasks_v2beta2/proto/queue_pb2_grpc.py | 3 - .../cloud/tasks_v2beta2/proto/target_pb2.py | 865 --- .../tasks_v2beta2/proto/target_pb2_grpc.py | 3 - google/cloud/tasks_v2beta2/proto/task_pb2.py | 626 -- .../tasks_v2beta2/proto/task_pb2_grpc.py | 3 - google/cloud/tasks_v2beta2/py.typed | 2 + .../cloud/tasks_v2beta2/services/__init__.py | 16 + .../services/cloud_tasks}/__init__.py | 18 +- 
.../services/cloud_tasks/async_client.py | 2158 +++++++ .../services/cloud_tasks/client.py | 2274 ++++++++ .../services/cloud_tasks/pagers.py | 278 + .../cloud_tasks/transports/__init__.py | 36 + .../services/cloud_tasks/transports/base.py | 448 ++ .../services/cloud_tasks/transports/grpc.py | 914 +++ .../cloud_tasks/transports/grpc_asyncio.py | 932 +++ google/cloud/tasks_v2beta2/types.py | 72 - google/cloud/tasks_v2beta2/types/__init__.py | 91 + .../cloud/tasks_v2beta2/types/cloudtasks.py | 725 +++ google/cloud/tasks_v2beta2/types/queue.py | 385 ++ google/cloud/tasks_v2beta2/types/target.py | 461 ++ google/cloud/tasks_v2beta2/types/task.py | 219 + google/cloud/tasks_v2beta3/__init__.py | 86 +- google/cloud/tasks_v2beta3/gapic/__init__.py | 0 .../tasks_v2beta3/gapic/cloud_tasks_client.py | 1700 ------ .../gapic/cloud_tasks_client_config.py | 122 - google/cloud/tasks_v2beta3/gapic/enums.py | 127 - .../gapic/transports/__init__.py | 0 .../transports/cloud_tasks_grpc_transport.py | 428 -- google/cloud/tasks_v2beta3/proto/__init__.py | 0 .../tasks_v2beta3/proto/cloudtasks_pb2.py | 1612 ------ .../proto/cloudtasks_pb2_grpc.py | 880 --- google/cloud/tasks_v2beta3/proto/queue_pb2.py | 873 --- .../tasks_v2beta3/proto/queue_pb2_grpc.py | 3 - .../cloud/tasks_v2beta3/proto/target_pb2.py | 1213 ---- .../tasks_v2beta3/proto/target_pb2_grpc.py | 3 - google/cloud/tasks_v2beta3/proto/task_pb2.py | 607 -- .../tasks_v2beta3/proto/task_pb2_grpc.py | 3 - google/cloud/tasks_v2beta3/py.typed | 2 + .../cloud/tasks_v2beta3/services/__init__.py | 16 + .../services/cloud_tasks/__init__.py} | 17 +- .../services/cloud_tasks/async_client.py | 1731 ++++++ .../services/cloud_tasks/client.py | 1839 ++++++ .../services/cloud_tasks/pagers.py | 278 + .../cloud_tasks/transports/__init__.py | 36 + .../services/cloud_tasks/transports/base.py | 397 ++ .../services/cloud_tasks/transports/grpc.py | 756 +++ .../cloud_tasks/transports/grpc_asyncio.py | 768 +++ google/cloud/tasks_v2beta3/types.py | 72 - google/cloud/tasks_v2beta3/types/__init__.py | 83 + .../cloud/tasks_v2beta3/types/cloudtasks.py | 479 ++ google/cloud/tasks_v2beta3/types/queue.py | 406 ++ google/cloud/tasks_v2beta3/types/target.py | 535 ++ google/cloud/tasks_v2beta3/types/task.py | 229 + mypy.ini | 3 + noxfile.py | 8 +- samples/snippets/create_http_task.py | 74 +- samples/snippets/create_http_task_test.py | 20 +- .../snippets/create_http_task_with_token.py | 42 +- .../create_http_task_with_token_test.py | 28 +- samples/snippets/create_queue.py | 10 +- samples/snippets/create_queue_test.py | 10 +- samples/snippets/delete_queue.py | 6 +- samples/snippets/delete_queue_test.py | 23 +- samples/snippets/list_queues.py | 12 +- samples/snippets/list_queues_test.py | 19 +- scripts/fixup_tasks_v2_keywords.py | 193 + scripts/fixup_tasks_v2beta2_keywords.py | 197 + scripts/fixup_tasks_v2beta3_keywords.py | 193 + setup.py | 20 +- synth.metadata | 6 +- synth.py | 92 +- tests/system/gapic/v2/test_system_tasks_v2.py | 5 +- tests/unit/gapic/tasks_v2/__init__.py | 1 + tests/unit/gapic/tasks_v2/test_cloud_tasks.py | 4193 ++++++++++++++ tests/unit/gapic/tasks_v2beta2/__init__.py | 1 + .../gapic/tasks_v2beta2/test_cloud_tasks.py | 5026 +++++++++++++++++ tests/unit/gapic/tasks_v2beta3/__init__.py | 1 + .../gapic/tasks_v2beta3/test_cloud_tasks.py | 4283 ++++++++++++++ .../gapic/v2/test_cloud_tasks_client_v2.py | 703 --- .../test_cloud_tasks_client_v2beta2.py | 849 --- .../test_cloud_tasks_client_v2beta3.py | 703 --- 143 files changed, 38748 insertions(+), 26439 deletions(-) rename 
google/cloud/tasks_v2/gapic/__init__.py => .github/snippet-bot.yml (100%) create mode 100644 UPGRADING.md create mode 120000 docs/UPGRADING.md delete mode 100644 docs/gapic/v2/api.rst delete mode 100644 docs/gapic/v2/types.rst delete mode 100644 docs/gapic/v2beta2/api.rst delete mode 100644 docs/gapic/v2beta2/types.rst delete mode 100644 docs/gapic/v2beta3/api.rst delete mode 100644 docs/gapic/v2beta3/types.rst create mode 100644 docs/tasks_v2/services.rst create mode 100644 docs/tasks_v2/types.rst create mode 100644 docs/tasks_v2beta2/services.rst create mode 100644 docs/tasks_v2beta2/types.rst create mode 100644 docs/tasks_v2beta3/services.rst create mode 100644 docs/tasks_v2beta3/types.rst create mode 100644 google/cloud/tasks/__init__.py create mode 100644 google/cloud/tasks/py.typed delete mode 100644 google/cloud/tasks_v2/gapic/cloud_tasks_client.py delete mode 100644 google/cloud/tasks_v2/gapic/cloud_tasks_client_config.py delete mode 100644 google/cloud/tasks_v2/gapic/enums.py delete mode 100644 google/cloud/tasks_v2/gapic/transports/__init__.py delete mode 100644 google/cloud/tasks_v2/gapic/transports/cloud_tasks_grpc_transport.py delete mode 100644 google/cloud/tasks_v2/proto/__init__.py delete mode 100644 google/cloud/tasks_v2/proto/cloudtasks_pb2.py delete mode 100644 google/cloud/tasks_v2/proto/cloudtasks_pb2_grpc.py delete mode 100644 google/cloud/tasks_v2/proto/queue_pb2.py delete mode 100644 google/cloud/tasks_v2/proto/queue_pb2_grpc.py delete mode 100644 google/cloud/tasks_v2/proto/target_pb2.py delete mode 100644 google/cloud/tasks_v2/proto/target_pb2_grpc.py delete mode 100644 google/cloud/tasks_v2/proto/task_pb2.py delete mode 100644 google/cloud/tasks_v2/proto/task_pb2_grpc.py create mode 100644 google/cloud/tasks_v2/py.typed create mode 100644 google/cloud/tasks_v2/services/__init__.py rename google/{ => cloud/tasks_v2/services/cloud_tasks}/__init__.py (71%) create mode 100644 google/cloud/tasks_v2/services/cloud_tasks/async_client.py create mode 100644 google/cloud/tasks_v2/services/cloud_tasks/client.py create mode 100644 google/cloud/tasks_v2/services/cloud_tasks/pagers.py create mode 100644 google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py create mode 100644 google/cloud/tasks_v2/services/cloud_tasks/transports/base.py create mode 100644 google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py create mode 100644 google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py delete mode 100644 google/cloud/tasks_v2/types.py create mode 100644 google/cloud/tasks_v2/types/__init__.py create mode 100644 google/cloud/tasks_v2/types/cloudtasks.py create mode 100644 google/cloud/tasks_v2/types/queue.py create mode 100644 google/cloud/tasks_v2/types/target.py create mode 100644 google/cloud/tasks_v2/types/task.py delete mode 100644 google/cloud/tasks_v2beta2/gapic/__init__.py delete mode 100644 google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py delete mode 100644 google/cloud/tasks_v2beta2/gapic/cloud_tasks_client_config.py delete mode 100644 google/cloud/tasks_v2beta2/gapic/enums.py delete mode 100644 google/cloud/tasks_v2beta2/gapic/transports/__init__.py delete mode 100644 google/cloud/tasks_v2beta2/gapic/transports/cloud_tasks_grpc_transport.py delete mode 100644 google/cloud/tasks_v2beta2/proto/__init__.py delete mode 100644 google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py delete mode 100644 google/cloud/tasks_v2beta2/proto/cloudtasks_pb2_grpc.py delete mode 100644 google/cloud/tasks_v2beta2/proto/queue_pb2.py delete mode 100644 
google/cloud/tasks_v2beta2/proto/queue_pb2_grpc.py delete mode 100644 google/cloud/tasks_v2beta2/proto/target_pb2.py delete mode 100644 google/cloud/tasks_v2beta2/proto/target_pb2_grpc.py delete mode 100644 google/cloud/tasks_v2beta2/proto/task_pb2.py delete mode 100644 google/cloud/tasks_v2beta2/proto/task_pb2_grpc.py create mode 100644 google/cloud/tasks_v2beta2/py.typed create mode 100644 google/cloud/tasks_v2beta2/services/__init__.py rename google/cloud/{ => tasks_v2beta2/services/cloud_tasks}/__init__.py (71%) create mode 100644 google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py create mode 100644 google/cloud/tasks_v2beta2/services/cloud_tasks/client.py create mode 100644 google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py create mode 100644 google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py create mode 100644 google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py create mode 100644 google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py create mode 100644 google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py delete mode 100644 google/cloud/tasks_v2beta2/types.py create mode 100644 google/cloud/tasks_v2beta2/types/__init__.py create mode 100644 google/cloud/tasks_v2beta2/types/cloudtasks.py create mode 100644 google/cloud/tasks_v2beta2/types/queue.py create mode 100644 google/cloud/tasks_v2beta2/types/target.py create mode 100644 google/cloud/tasks_v2beta2/types/task.py delete mode 100644 google/cloud/tasks_v2beta3/gapic/__init__.py delete mode 100644 google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py delete mode 100644 google/cloud/tasks_v2beta3/gapic/cloud_tasks_client_config.py delete mode 100644 google/cloud/tasks_v2beta3/gapic/enums.py delete mode 100644 google/cloud/tasks_v2beta3/gapic/transports/__init__.py delete mode 100644 google/cloud/tasks_v2beta3/gapic/transports/cloud_tasks_grpc_transport.py delete mode 100644 google/cloud/tasks_v2beta3/proto/__init__.py delete mode 100644 google/cloud/tasks_v2beta3/proto/cloudtasks_pb2.py delete mode 100644 google/cloud/tasks_v2beta3/proto/cloudtasks_pb2_grpc.py delete mode 100644 google/cloud/tasks_v2beta3/proto/queue_pb2.py delete mode 100644 google/cloud/tasks_v2beta3/proto/queue_pb2_grpc.py delete mode 100644 google/cloud/tasks_v2beta3/proto/target_pb2.py delete mode 100644 google/cloud/tasks_v2beta3/proto/target_pb2_grpc.py delete mode 100644 google/cloud/tasks_v2beta3/proto/task_pb2.py delete mode 100644 google/cloud/tasks_v2beta3/proto/task_pb2_grpc.py create mode 100644 google/cloud/tasks_v2beta3/py.typed create mode 100644 google/cloud/tasks_v2beta3/services/__init__.py rename google/cloud/{tasks.py => tasks_v2beta3/services/cloud_tasks/__init__.py} (70%) create mode 100644 google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py create mode 100644 google/cloud/tasks_v2beta3/services/cloud_tasks/client.py create mode 100644 google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py create mode 100644 google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py create mode 100644 google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py create mode 100644 google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py create mode 100644 google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py delete mode 100644 google/cloud/tasks_v2beta3/types.py create mode 100644 google/cloud/tasks_v2beta3/types/__init__.py create mode 100644 google/cloud/tasks_v2beta3/types/cloudtasks.py create mode 100644 
google/cloud/tasks_v2beta3/types/queue.py create mode 100644 google/cloud/tasks_v2beta3/types/target.py create mode 100644 google/cloud/tasks_v2beta3/types/task.py create mode 100644 mypy.ini create mode 100644 scripts/fixup_tasks_v2_keywords.py create mode 100644 scripts/fixup_tasks_v2beta2_keywords.py create mode 100644 scripts/fixup_tasks_v2beta3_keywords.py create mode 100644 tests/unit/gapic/tasks_v2/__init__.py create mode 100644 tests/unit/gapic/tasks_v2/test_cloud_tasks.py create mode 100644 tests/unit/gapic/tasks_v2beta2/__init__.py create mode 100644 tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py create mode 100644 tests/unit/gapic/tasks_v2beta3/__init__.py create mode 100644 tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py delete mode 100644 tests/unit/gapic/v2/test_cloud_tasks_client_v2.py delete mode 100644 tests/unit/gapic/v2beta2/test_cloud_tasks_client_v2beta2.py delete mode 100644 tests/unit/gapic/v2beta3/test_cloud_tasks_client_v2beta3.py diff --git a/.coveragerc b/.coveragerc index dd39c854..0284f6ba 100644 --- a/.coveragerc +++ b/.coveragerc @@ -21,15 +21,14 @@ branch = True [report] fail_under = 100 show_missing = True +omit = google/cloud/tasks/__init__.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound \ No newline at end of file diff --git a/google/cloud/tasks_v2/gapic/__init__.py b/.github/snippet-bot.yml similarity index 100% rename from google/cloud/tasks_v2/gapic/__init__.py rename to .github/snippet-bot.yml diff --git a/README.rst b/README.rst index 96328da5..24139fb8 100644 --- a/README.rst +++ b/README.rst @@ -50,11 +50,13 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 +Python >= 3.6 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. +Python == 2.7. + +The last version of this library compatible with Python 2.7 is google-cloud-tasks==1.5.0. Mac/Linux diff --git a/UPGRADING.md b/UPGRADING.md new file mode 100644 index 00000000..79609ef4 --- /dev/null +++ b/UPGRADING.md @@ -0,0 +1,157 @@ +# 2.0.0 Migration Guide + +The 2.0 release of the `google-cloud-tasks` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage. + +If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-tasks/issues). + +## Supported Python Versions + +> **WARNING**: Breaking change + +The 2.0.0 release requires Python 3.6+. + + +## Method Calls + +> **WARNING**: Breaking change + +Methods expect request objects. We provide a script that will convert most common use cases. + +* Install the library + +```py +python3 -m pip install google-cloud-tasks +``` + +* The script `fixup_tasks_v2_keywords.py` is shipped with the library. 
It expects +an input directory (with the code to convert) and an empty destination directory. + +```sh +$ fixup_tasks_v2_keywords.py --input-directory .samples/ --output-directory samples/ +``` + +**Before:** +```py +from google.cloud import tasks_v2 + +client = tasks_v2.CloudTasksClient() + +queue = client.get_queue("queue_name") +``` + + +**After:** +```py +from google.cloud import tasks_v2 + +client = tasks_v2.CloudTasksClient() + +queue = client.get_queue(request={'name': "queue_name"}) +``` + +### More Details + +In `google-cloud-tasks<2.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters. + +**Before:** +```py + def create_queue( + self, + parent, + queue, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): +``` + +In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional. + +Some methods have additional keyword-only parameters. The available parameters depend on the `google.api.method_signature` annotation specified by the API producer. + + +**After:** +```py + def create_queue( + self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: +``` + +> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive. +> Passing both will result in an error. + + +Both of these calls are valid: + +```py +response = client.create_queue( + request={ + "parent": parent, + "queue": queue, + } +) +``` + +```py +response = client.create_queue( + parent=parent, + queue=queue, +) +``` + +This call is invalid because it mixes `request` with a keyword argument `queue`. Executing this code +will result in an error. + +```py +response = client.create_queue( + request={ + "parent": parent, + }, + queue=queue +) +``` + + + +## Enums and Types + + +> **WARNING**: Breaking change + +The submodules `enums` and `types` have been removed. + +**Before:** +```py +from google.cloud import tasks_v2 + +http_method = tasks_v2.enums.HttpMethod.POST +queue = tasks_v2.types.Queue(name="name") +``` + + +**After:** +```py +from google.cloud import tasks_v2 + +http_method = tasks_v2.HttpMethod.POST +queue = tasks_v2.Queue(name="name") +``` + +## Location Path Helper Method + +The location path helper method has been removed. Please construct +the path manually. + +```py +project = 'my-project' +location = 'location' + +location_path = f'projects/{project}/locations/{location}' +``` diff --git a/docs/UPGRADING.md b/docs/UPGRADING.md new file mode 120000 index 00000000..01097c8c --- /dev/null +++ b/docs/UPGRADING.md @@ -0,0 +1 @@ +../UPGRADING.md \ No newline at end of file diff --git a/docs/gapic/v2/api.rst b/docs/gapic/v2/api.rst deleted file mode 100644 index 229856fb..00000000 --- a/docs/gapic/v2/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Cloud Tasks API -========================== - -.. automodule:: google.cloud.tasks_v2 - :members: - :inherited-members: \ No newline at end of file diff --git a/docs/gapic/v2/types.rst b/docs/gapic/v2/types.rst deleted file mode 100644 index 8e518bb8..00000000 --- a/docs/gapic/v2/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Cloud Tasks API Client -================================ - -..
automodule:: google.cloud.tasks_v2.types - :members: \ No newline at end of file diff --git a/docs/gapic/v2beta2/api.rst b/docs/gapic/v2beta2/api.rst deleted file mode 100644 index 80696d86..00000000 --- a/docs/gapic/v2beta2/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Cloud Tasks API -========================== - -.. automodule:: google.cloud.tasks_v2beta2 - :members: - :inherited-members: \ No newline at end of file diff --git a/docs/gapic/v2beta2/types.rst b/docs/gapic/v2beta2/types.rst deleted file mode 100644 index 375c1887..00000000 --- a/docs/gapic/v2beta2/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Cloud Tasks API Client -================================ - -.. automodule:: google.cloud.tasks_v2beta2.types - :members: \ No newline at end of file diff --git a/docs/gapic/v2beta3/api.rst b/docs/gapic/v2beta3/api.rst deleted file mode 100644 index 17e911b7..00000000 --- a/docs/gapic/v2beta3/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Cloud Tasks API -========================== - -.. automodule:: google.cloud.tasks_v2beta3 - :members: - :inherited-members: \ No newline at end of file diff --git a/docs/gapic/v2beta3/types.rst b/docs/gapic/v2beta3/types.rst deleted file mode 100644 index 42bed0b5..00000000 --- a/docs/gapic/v2beta3/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Cloud Tasks API Client -================================ - -.. automodule:: google.cloud.tasks_v2beta3.types - :members: \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst index 2c816781..26619de7 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -11,8 +11,8 @@ API. By default, you will get ``v2``, the latest version. .. toctree:: :maxdepth: 2 - gapic/v2/api - gapic/v2/types + tasks_v2/services + tasks_v2/types The previous beta releases, spelled ``v2beta3`` and ``v2beta2``, are provided to continue to @@ -25,16 +25,26 @@ v2beta3: .. toctree:: :maxdepth: 2 - gapic/v2beta3/api - gapic/v2beta3/types + tasks_v2beta3/services + tasks_v2beta3/types v2beta2: .. toctree:: :maxdepth: 2 - gapic/v2beta2/api - gapic/v2beta2/types + tasks_v2beta2/services + tasks_v2beta2/types + +Migration Guide +--------------- + +See the guide below for instructions on migrating to the 2.x release of this library. + +.. toctree:: + :maxdepth: 2 + + UPGRADING Changelog --------- diff --git a/docs/tasks_v2/services.rst b/docs/tasks_v2/services.rst new file mode 100644 index 00000000..f1767aa7 --- /dev/null +++ b/docs/tasks_v2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Tasks v2 API +====================================== + +.. automodule:: google.cloud.tasks_v2.services.cloud_tasks + :members: + :inherited-members: diff --git a/docs/tasks_v2/types.rst b/docs/tasks_v2/types.rst new file mode 100644 index 00000000..886cf90e --- /dev/null +++ b/docs/tasks_v2/types.rst @@ -0,0 +1,5 @@ +Types for Google Cloud Tasks v2 API +=================================== + +.. automodule:: google.cloud.tasks_v2.types + :members: diff --git a/docs/tasks_v2beta2/services.rst b/docs/tasks_v2beta2/services.rst new file mode 100644 index 00000000..21623446 --- /dev/null +++ b/docs/tasks_v2beta2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Tasks v2beta2 API +=========================================== + +.. 
automodule:: google.cloud.tasks_v2beta2.services.cloud_tasks + :members: + :inherited-members: diff --git a/docs/tasks_v2beta2/types.rst b/docs/tasks_v2beta2/types.rst new file mode 100644 index 00000000..b12cd9ed --- /dev/null +++ b/docs/tasks_v2beta2/types.rst @@ -0,0 +1,5 @@ +Types for Google Cloud Tasks v2beta2 API +======================================== + +.. automodule:: google.cloud.tasks_v2beta2.types + :members: diff --git a/docs/tasks_v2beta3/services.rst b/docs/tasks_v2beta3/services.rst new file mode 100644 index 00000000..9b56326f --- /dev/null +++ b/docs/tasks_v2beta3/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Tasks v2beta3 API +=========================================== + +.. automodule:: google.cloud.tasks_v2beta3.services.cloud_tasks + :members: + :inherited-members: diff --git a/docs/tasks_v2beta3/types.rst b/docs/tasks_v2beta3/types.rst new file mode 100644 index 00000000..50611382 --- /dev/null +++ b/docs/tasks_v2beta3/types.rst @@ -0,0 +1,5 @@ +Types for Google Cloud Tasks v2beta3 API +======================================== + +.. automodule:: google.cloud.tasks_v2beta3.types + :members: diff --git a/google/cloud/tasks/__init__.py b/google/cloud/tasks/__init__.py new file mode 100644 index 00000000..67168406 --- /dev/null +++ b/google/cloud/tasks/__init__.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.cloud.tasks_v2.services.cloud_tasks.async_client import ( + CloudTasksAsyncClient, +) +from google.cloud.tasks_v2.services.cloud_tasks.client import CloudTasksClient +from google.cloud.tasks_v2.types.cloudtasks import CreateQueueRequest +from google.cloud.tasks_v2.types.cloudtasks import CreateTaskRequest +from google.cloud.tasks_v2.types.cloudtasks import DeleteQueueRequest +from google.cloud.tasks_v2.types.cloudtasks import DeleteTaskRequest +from google.cloud.tasks_v2.types.cloudtasks import GetQueueRequest +from google.cloud.tasks_v2.types.cloudtasks import GetTaskRequest +from google.cloud.tasks_v2.types.cloudtasks import ListQueuesRequest +from google.cloud.tasks_v2.types.cloudtasks import ListQueuesResponse +from google.cloud.tasks_v2.types.cloudtasks import ListTasksRequest +from google.cloud.tasks_v2.types.cloudtasks import ListTasksResponse +from google.cloud.tasks_v2.types.cloudtasks import PauseQueueRequest +from google.cloud.tasks_v2.types.cloudtasks import PurgeQueueRequest +from google.cloud.tasks_v2.types.cloudtasks import ResumeQueueRequest +from google.cloud.tasks_v2.types.cloudtasks import RunTaskRequest +from google.cloud.tasks_v2.types.cloudtasks import UpdateQueueRequest +from google.cloud.tasks_v2.types.queue import Queue +from google.cloud.tasks_v2.types.queue import RateLimits +from google.cloud.tasks_v2.types.queue import RetryConfig +from google.cloud.tasks_v2.types.queue import StackdriverLoggingConfig +from google.cloud.tasks_v2.types.target import AppEngineHttpRequest +from google.cloud.tasks_v2.types.target import AppEngineRouting +from google.cloud.tasks_v2.types.target import HttpMethod +from google.cloud.tasks_v2.types.target import HttpRequest +from google.cloud.tasks_v2.types.target import OAuthToken +from google.cloud.tasks_v2.types.target import OidcToken +from google.cloud.tasks_v2.types.task import Attempt +from google.cloud.tasks_v2.types.task import Task + +__all__ = ( + "AppEngineHttpRequest", + "AppEngineRouting", + "Attempt", + "CloudTasksAsyncClient", + "CloudTasksClient", + "CreateQueueRequest", + "CreateTaskRequest", + "DeleteQueueRequest", + "DeleteTaskRequest", + "GetQueueRequest", + "GetTaskRequest", + "HttpMethod", + "HttpRequest", + "ListQueuesRequest", + "ListQueuesResponse", + "ListTasksRequest", + "ListTasksResponse", + "OAuthToken", + "OidcToken", + "PauseQueueRequest", + "PurgeQueueRequest", + "Queue", + "RateLimits", + "ResumeQueueRequest", + "RetryConfig", + "RunTaskRequest", + "StackdriverLoggingConfig", + "Task", + "UpdateQueueRequest", +) diff --git a/google/cloud/tasks/py.typed b/google/cloud/tasks/py.typed new file mode 100644 index 00000000..41f0b1b8 --- /dev/null +++ b/google/cloud/tasks/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. diff --git a/google/cloud/tasks_v2/__init__.py b/google/cloud/tasks_v2/__init__.py index b9550b95..da740ea0 100644 --- a/google/cloud/tasks_v2/__init__.py +++ b/google/cloud/tasks_v2/__init__.py @@ -1,45 +1,77 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# - -from __future__ import absolute_import -import sys -import warnings - -from google.cloud.tasks_v2 import types -from google.cloud.tasks_v2.gapic import cloud_tasks_client -from google.cloud.tasks_v2.gapic import enums - - -if sys.version_info[:2] == (2, 7): - message = ( - "A future version of this library will drop support for Python 2.7. " - "More details about Python 2 support for Google Cloud Client Libraries " - "can be found at https://cloud.google.com/python/docs/python2-sunset/" - ) - warnings.warn(message, DeprecationWarning) - - -class CloudTasksClient(cloud_tasks_client.CloudTasksClient): - __doc__ = cloud_tasks_client.CloudTasksClient.__doc__ - enums = enums +from .services.cloud_tasks import CloudTasksClient +from .types.cloudtasks import CreateQueueRequest +from .types.cloudtasks import CreateTaskRequest +from .types.cloudtasks import DeleteQueueRequest +from .types.cloudtasks import DeleteTaskRequest +from .types.cloudtasks import GetQueueRequest +from .types.cloudtasks import GetTaskRequest +from .types.cloudtasks import ListQueuesRequest +from .types.cloudtasks import ListQueuesResponse +from .types.cloudtasks import ListTasksRequest +from .types.cloudtasks import ListTasksResponse +from .types.cloudtasks import PauseQueueRequest +from .types.cloudtasks import PurgeQueueRequest +from .types.cloudtasks import ResumeQueueRequest +from .types.cloudtasks import RunTaskRequest +from .types.cloudtasks import UpdateQueueRequest +from .types.queue import Queue +from .types.queue import RateLimits +from .types.queue import RetryConfig +from .types.queue import StackdriverLoggingConfig +from .types.target import AppEngineHttpRequest +from .types.target import AppEngineRouting +from .types.target import HttpMethod +from .types.target import HttpRequest +from .types.target import OAuthToken +from .types.target import OidcToken +from .types.task import Attempt +from .types.task import Task __all__ = ( - "enums", - "types", + "AppEngineHttpRequest", + "AppEngineRouting", + "Attempt", + "CreateQueueRequest", + "CreateTaskRequest", + "DeleteQueueRequest", + "DeleteTaskRequest", + "GetQueueRequest", + "GetTaskRequest", + "HttpMethod", + "HttpRequest", + "ListQueuesRequest", + "ListQueuesResponse", + "ListTasksRequest", + "ListTasksResponse", + "OAuthToken", + "OidcToken", + "PauseQueueRequest", + "PurgeQueueRequest", + "Queue", + "RateLimits", + "ResumeQueueRequest", + "RetryConfig", + "RunTaskRequest", + "StackdriverLoggingConfig", + "Task", + "UpdateQueueRequest", "CloudTasksClient", ) diff --git a/google/cloud/tasks_v2/gapic/cloud_tasks_client.py b/google/cloud/tasks_v2/gapic/cloud_tasks_client.py deleted file mode 100644 index d0f62bff..00000000 --- a/google/cloud/tasks_v2/gapic/cloud_tasks_client.py +++ /dev/null @@ -1,1682 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.cloud.tasks.v2 CloudTasks API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.tasks_v2.gapic import cloud_tasks_client_config -from google.cloud.tasks_v2.gapic import enums -from google.cloud.tasks_v2.gapic.transports import cloud_tasks_grpc_transport -from google.cloud.tasks_v2.proto import cloudtasks_pb2 -from google.cloud.tasks_v2.proto import cloudtasks_pb2_grpc -from google.cloud.tasks_v2.proto import queue_pb2 -from google.cloud.tasks_v2.proto import task_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import options_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-tasks",).version - - -class CloudTasksClient(object): - """ - Cloud Tasks allows developers to manage the execution of background - work in their applications. - """ - - SERVICE_ADDRESS = "cloudtasks.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.tasks.v2.CloudTasks" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def location_path(cls, project, location): - """Return a fully-qualified location string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}", - project=project, - location=location, - ) - - @classmethod - def queue_path(cls, project, location, queue): - """Return a fully-qualified queue string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/queues/{queue}", - project=project, - location=location, - queue=queue, - ) - - @classmethod - def task_path(cls, project, location, queue, task): - """Return a fully-qualified task string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}", - project=project, - location=location, - queue=queue, - task=task, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.CloudTasksGrpcTransport, - Callable[[~.Credentials, type], ~.CloudTasksGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. 
- if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = cloud_tasks_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=cloud_tasks_grpc_transport.CloudTasksGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = cloud_tasks_grpc_transport.CloudTasksGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def list_queues( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists queues. - - Queues are returned in lexicographical order. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> parent = client.location_path('[PROJECT]', '[LOCATION]') - >>> - >>> # Iterate over all results - >>> for element in client.list_queues(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_queues(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - filter_ (str): ``filter`` can be used to specify a subset of queues. Any ``Queue`` - field can be used as a filter and several operators as supported. For - example: ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as - described in `Stackdriver's Advanced Logs - Filters `__. - - Sample filter "state: PAUSED". - - Note that using filters might cause fewer queues than the requested - page_size to be returned. - page_size (int): The maximum number of resources contained in the - underlying API response. 
If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.tasks_v2.types.Queue` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_queues" not in self._inner_api_calls: - self._inner_api_calls[ - "list_queues" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_queues, - default_retry=self._method_configs["ListQueues"].retry, - default_timeout=self._method_configs["ListQueues"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.ListQueuesRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_queues"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="queues", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a queue. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> response = client.get_queue(name) - - Args: - name (str): Required. The resource name of the queue. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2.types.Queue` instance. 
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "get_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_queue, - default_retry=self._method_configs["GetQueue"].retry, - default_timeout=self._method_configs["GetQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.GetQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_queue( - self, - parent, - queue, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a queue. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless - of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> parent = client.location_path('[PROJECT]', '[LOCATION]') - >>> - >>> # TODO: Initialize `queue`: - >>> queue = {} - >>> - >>> response = client.create_queue(parent, queue) - - Args: - parent (str): Required. The location name in which the queue will be created. For - example: ``projects/PROJECT_ID/locations/LOCATION_ID`` - - The list of allowed locations can be obtained by calling Cloud Tasks' - implementation of ``ListLocations``. - queue (Union[dict, ~google.cloud.tasks_v2.types.Queue]): Required. The queue to create. - - ``Queue's name`` cannot be the same as an existing queue. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2.types.Queue` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "create_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_queue, - default_retry=self._method_configs["CreateQueue"].retry, - default_timeout=self._method_configs["CreateQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.CreateQueueRequest(parent=parent, queue=queue,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_queue( - self, - queue, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a queue. - - This method creates the queue if it does not exist and updates the queue - if it does exist. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless - of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> # TODO: Initialize `queue`: - >>> queue = {} - >>> - >>> response = client.update_queue(queue) - - Args: - queue (Union[dict, ~google.cloud.tasks_v2.types.Queue]): Required. The queue to create or update. - - The queue's ``name`` must be specified. - - Output only fields cannot be modified using UpdateQueue. Any value - specified for an output only field will be ignored. The queue's ``name`` - cannot be changed. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2.types.Queue` - update_mask (Union[dict, ~google.cloud.tasks_v2.types.FieldMask]): A mask used to specify which fields of the queue are being updated. - - If empty, then all fields will be updated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "update_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_queue, - default_retry=self._method_configs["UpdateQueue"].retry, - default_timeout=self._method_configs["UpdateQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.UpdateQueueRequest( - queue=queue, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("queue.name", queue.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be created - for 7 days. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> client.delete_queue(name) - - Args: - name (str): Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_queue, - default_retry=self._method_configs["DeleteQueue"].retry, - default_timeout=self._method_configs["DeleteQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.DeleteQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def purge_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Purges a queue by deleting all of its tasks. - - All tasks created before this method is called are permanently deleted. - - Purge operations can take up to one minute to take effect. Tasks - might be dispatched before the purge takes effect. A purge is irreversible. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> response = client.purge_queue(name) - - Args: - name (str): Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "purge_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "purge_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.purge_queue, - default_retry=self._method_configs["PurgeQueue"].retry, - default_timeout=self._method_configs["PurgeQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.PurgeQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["purge_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def pause_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Pauses the queue. 
- - If a queue is paused then the system will stop dispatching tasks until - the queue is resumed via ``ResumeQueue``. Tasks can still be added when - the queue is paused. A queue is paused if its ``state`` is ``PAUSED``. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> response = client.pause_queue(name) - - Args: - name (str): Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "pause_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "pause_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.pause_queue, - default_retry=self._method_configs["PauseQueue"].retry, - default_timeout=self._method_configs["PauseQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.PauseQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["pause_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def resume_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Resume a queue. - - This method resumes a queue after it has been ``PAUSED`` or - ``DISABLED``. The state of a queue is stored in the queue's ``state``; - after calling this method it will be set to ``RUNNING``. - - WARNING: Resuming many high-QPS queues at the same time can lead to - target overloading. If you are resuming high-QPS queues, follow the - 500/50/5 pattern described in `Managing Cloud Tasks Scaling - Risks `__. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> response = client.resume_queue(name) - - Args: - name (str): Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "resume_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "resume_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.resume_queue, - default_retry=self._method_configs["ResumeQueue"].retry, - default_timeout=self._method_configs["ResumeQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.ResumeQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["resume_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_iam_policy( - self, - resource, - options_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the access control policy for a ``Queue``. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the specified - resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> response = client.get_iam_policy(resource) - - Args: - resource (str): REQUIRED: The resource for which the policy is being requested. - See the operation documentation for the appropriate value for this field. - options_ (Union[dict, ~google.cloud.tasks_v2.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to - ``GetIamPolicy``. This field is only used by Cloud IAM. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2.types.GetPolicyOptions` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
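To make the placeholder example above concrete, here is a hedged sketch of fetching and inspecting a queue policy with this removed client surface; the project, location, and queue IDs are invented for illustration:

    from google.cloud import tasks_v2

    client = tasks_v2.CloudTasksClient()
    resource = client.queue_path("my-project", "us-central1", "my-queue")  # placeholder IDs

    # options_ may be omitted, or passed as a dict in the GetPolicyOptions form
    # described above, e.g. {"requested_policy_version": 1}.
    policy = client.get_iam_policy(resource)
    for binding in policy.bindings:
        print(binding.role, list(binding.members))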
- if "get_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "get_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_iam_policy, - default_retry=self._method_configs["GetIamPolicy"].retry, - default_timeout=self._method_configs["GetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def set_iam_policy( - self, - resource, - policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Sets the access control policy for a ``Queue``. Replaces any - existing policy. - - Note: The Cloud Console does not check queue-level IAM permissions yet. - Project-level permissions are required to use the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the specified - resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `policy`: - >>> policy = {} - >>> - >>> response = client.set_iam_policy(resource, policy) - - Args: - resource (str): REQUIRED: The resource for which the policy is being specified. - See the operation documentation for the appropriate value for this field. - policy (Union[dict, ~google.cloud.tasks_v2.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The - size of the policy is limited to a few 10s of KB. An empty policy is a - valid policy but certain Cloud Platform services (such as Projects) - might reject them. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2.types.Policy` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "set_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "set_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.set_iam_policy, - default_retry=self._method_configs["SetIamPolicy"].retry, - default_timeout=self._method_configs["SetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["set_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def test_iam_permissions( - self, - resource, - permissions, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns permissions that a caller has on a ``Queue``. If the - resource does not exist, this will return an empty set of permissions, - not a ``NOT_FOUND`` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `permissions`: - >>> permissions = [] - >>> - >>> response = client.test_iam_permissions(resource, permissions) - - Args: - resource (str): REQUIRED: The resource for which the policy detail is being requested. - See the operation documentation for the appropriate value for this field. - permissions (list[str]): The set of permissions to check for the ``resource``. Permissions - with wildcards (such as '*' or 'storage.*') are not allowed. For more - information see `IAM - Overview `__. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2.types.TestIamPermissionsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "test_iam_permissions" not in self._inner_api_calls: - self._inner_api_calls[ - "test_iam_permissions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.test_iam_permissions, - default_retry=self._method_configs["TestIamPermissions"].retry, - default_timeout=self._method_configs["TestIamPermissions"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["test_iam_permissions"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_tasks( - self, - parent, - response_view=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the tasks in a queue. - - By default, only the ``BASIC`` view is retrieved due to performance - considerations; ``response_view`` controls the subset of information - which is returned. - - The tasks may be returned in any order. The ordering may change at any - time. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> # Iterate over all results - >>> for element in client.list_tasks(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_tasks(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - response_view (~google.cloud.tasks_v2.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.tasks_v2.types.Task` instances. 
- You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_tasks" not in self._inner_api_calls: - self._inner_api_calls[ - "list_tasks" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_tasks, - default_retry=self._method_configs["ListTasks"].retry, - default_timeout=self._method_configs["ListTasks"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.ListTasksRequest( - parent=parent, response_view=response_view, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_tasks"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="tasks", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_task( - self, - name, - response_view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a task. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') - >>> - >>> response = client.get_task(name) - - Args: - name (str): Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (~google.cloud.tasks_v2.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2.types.Task` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
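As a sketch of the `response_view` behaviour described for ListTasks and GetTask, assuming the caller holds `cloudtasks.tasks.fullView`; the resource IDs are placeholders:

    from google.cloud import tasks_v2

    client = tasks_v2.CloudTasksClient()
    parent = client.queue_path("my-project", "us-central1", "my-queue")  # placeholder IDs

    # BASIC (the default) omits large or sensitive fields such as request bodies;
    # ask for FULL explicitly when the payload itself is needed.
    full_view = tasks_v2.enums.Task.View.FULL
    for task in client.list_tasks(parent, response_view=full_view):
        print(task.name)

    task_name = client.task_path("my-project", "us-central1", "my-queue", "my-task")
    task = client.get_task(task_name, response_view=full_view)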
- if "get_task" not in self._inner_api_calls: - self._inner_api_calls[ - "get_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_task, - default_retry=self._method_configs["GetTask"].retry, - default_timeout=self._method_configs["GetTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.GetTaskRequest(name=name, response_view=response_view,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_task( - self, - parent, - task, - response_view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask command. - - - The maximum task size is 100KB. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> # TODO: Initialize `task`: - >>> task = {} - >>> - >>> response = client.create_task(parent, task) - - Args: - parent (str): Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - The queue must already exist. - task (Union[dict, ~google.cloud.tasks_v2.types.Task]): Required. The task to add. - - Task names have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. - The user can optionally specify a task ``name``. If a name is not - specified then the system will generate a random unique task id, which - will be set in the task returned in the ``response``. - - If ``schedule_time`` is not set or is in the past then Cloud Tasks will - set it to the current time. - - Task De-duplication: - - Explicitly specifying a task ID enables task de-duplication. If a task's - ID is identical to that of an existing task or a task that was deleted - or executed recently then the call will fail with ``ALREADY_EXISTS``. If - the task's queue was created using Cloud Tasks, then another task with - the same name can't be created for ~1hour after the original task was - deleted or executed. If the task's queue was created using queue.yaml or - queue.xml, then another task with the same name can't be created for - ~9days after the original task was deleted or executed. - - Because there is an extra lookup cost to identify duplicate task names, - these ``CreateTask`` calls have significantly increased latency. Using - hashed strings for the task id or for the prefix of the task id is - recommended. Choosing task ids that are sequential or have sequential - prefixes, for example using a timestamp, causes an increase in latency - and error rates in all task commands. The infrastructure relies on an - approximately uniform distribution of task ids to store and serve tasks - efficiently. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2.types.Task` - response_view (~google.cloud.tasks_v2.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. 
- - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2.types.Task` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_task" not in self._inner_api_calls: - self._inner_api_calls[ - "create_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_task, - default_retry=self._method_configs["CreateTask"].retry, - default_timeout=self._method_configs["CreateTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.CreateTaskRequest( - parent=parent, task=task, response_view=response_view, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_task( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a task. - - A task can be deleted if it is scheduled or dispatched. A task - cannot be deleted if it has executed successfully or permanently - failed. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') - >>> - >>> client.delete_task(name) - - Args: - name (str): Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. 
- ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_task" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_task, - default_retry=self._method_configs["DeleteTask"].retry, - default_timeout=self._method_configs["DeleteTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.DeleteTaskRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def run_task( - self, - name, - response_view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, even if - the task is already running, the queue has reached its ``RateLimits`` or - is ``PAUSED``. - - This command is meant to be used for manual debugging. For example, - ``RunTask`` can be used to retry a failed task after a fix has been made - or to manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is returned - contains the ``status`` after the task is dispatched but before the task - is received by its target. - - If Cloud Tasks receives a successful response from the task's target, - then the task will be deleted; otherwise the task's ``schedule_time`` - will be reset to the time that ``RunTask`` was called plus the retry - delay specified in the queue's ``RetryConfig``. - - ``RunTask`` returns ``NOT_FOUND`` when it is called on a task that has - already succeeded or permanently failed. - - Example: - >>> from google.cloud import tasks_v2 - >>> - >>> client = tasks_v2.CloudTasksClient() - >>> - >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') - >>> - >>> response = client.run_task(name) - - Args: - name (str): Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (~google.cloud.tasks_v2.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2.types.Task` instance. 
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "run_task" not in self._inner_api_calls: - self._inner_api_calls[ - "run_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.run_task, - default_retry=self._method_configs["RunTask"].retry, - default_timeout=self._method_configs["RunTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.RunTaskRequest(name=name, response_view=response_view,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["run_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/google/cloud/tasks_v2/gapic/cloud_tasks_client_config.py b/google/cloud/tasks_v2/gapic/cloud_tasks_client_config.py deleted file mode 100644 index 1517ab34..00000000 --- a/google/cloud/tasks_v2/gapic/cloud_tasks_client_config.py +++ /dev/null @@ -1,122 +0,0 @@ -config = { - "interfaces": { - "google.cloud.tasks.v2.CloudTasks": { - "retry_codes": { - "retry_policy_1_codes": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "no_retry_codes": [], - "no_retry_1_codes": [], - }, - "retry_params": { - "retry_policy_1_params": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 10000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 10000, - "total_timeout_millis": 10000, - }, - "no_retry_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 0, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 0, - "total_timeout_millis": 0, - }, - "no_retry_1_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 10000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 10000, - "total_timeout_millis": 10000, - }, - }, - "methods": { - "ListQueues": { - "timeout_millis": 15000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "GetQueue": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "CreateQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "UpdateQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "DeleteQueue": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "PurgeQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "PauseQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "ResumeQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": 
"no_retry_1_params", - }, - "GetIamPolicy": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "SetIamPolicy": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "TestIamPermissions": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListTasks": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "GetTask": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "CreateTask": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "DeleteTask": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "RunTask": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - }, - } - } -} diff --git a/google/cloud/tasks_v2/gapic/enums.py b/google/cloud/tasks_v2/gapic/enums.py deleted file mode 100644 index 60106a25..00000000 --- a/google/cloud/tasks_v2/gapic/enums.py +++ /dev/null @@ -1,113 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class HttpMethod(enum.IntEnum): - """ - The HTTP method used to deliver the task. - - Attributes: - HTTP_METHOD_UNSPECIFIED (int): HTTP method unspecified - POST (int): HTTP POST - GET (int): HTTP GET - HEAD (int): HTTP HEAD - PUT (int): HTTP PUT - DELETE (int): HTTP DELETE - PATCH (int): HTTP PATCH - OPTIONS (int): HTTP OPTIONS - """ - - HTTP_METHOD_UNSPECIFIED = 0 - POST = 1 - GET = 2 - HEAD = 3 - PUT = 4 - DELETE = 5 - PATCH = 6 - OPTIONS = 7 - - -class Queue(object): - class State(enum.IntEnum): - """ - State of the queue. - - Attributes: - STATE_UNSPECIFIED (int): Unspecified state. - RUNNING (int): The queue is running. Tasks can be dispatched. - - If the queue was created using Cloud Tasks and the queue has had no - activity (method calls or task dispatches) for 30 days, the queue may - take a few minutes to re-activate. Some method calls may return - ``NOT_FOUND`` and tasks may not be dispatched for a few minutes until - the queue has been re-activated. - PAUSED (int): Tasks are paused by the user. If the queue is paused then Cloud - Tasks will stop delivering tasks from it, but more tasks can - still be added to it by the user. - DISABLED (int): The queue is disabled. - - A queue becomes ``DISABLED`` when - `queue.yaml `__ - or - `queue.xml `__ - is uploaded which does not contain the queue. You cannot directly - disable a queue. - - When a queue is disabled, tasks can still be added to a queue but the - tasks are not dispatched. 
- - To permanently delete this queue and all of its tasks, call - ``DeleteQueue``. - """ - - STATE_UNSPECIFIED = 0 - RUNNING = 1 - PAUSED = 2 - DISABLED = 3 - - -class Task(object): - class View(enum.IntEnum): - """ - The view specifies a subset of ``Task`` data. - - When a task is returned in a response, not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Attributes: - VIEW_UNSPECIFIED (int): Unspecified. Defaults to BASIC. - BASIC (int): The basic view omits fields which can be large or can contain - sensitive data. - - This view does not include the ``body in AppEngineHttpRequest``. Bodies - are desirable to return only when needed, because they can be large and - because of the sensitivity of the data that you choose to store in it. - FULL (int): All information is returned. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `__ permission on the - ``Queue`` resource. - """ - - VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 diff --git a/google/cloud/tasks_v2/gapic/transports/__init__.py b/google/cloud/tasks_v2/gapic/transports/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/tasks_v2/gapic/transports/cloud_tasks_grpc_transport.py b/google/cloud/tasks_v2/gapic/transports/cloud_tasks_grpc_transport.py deleted file mode 100644 index 6a29a6d3..00000000 --- a/google/cloud/tasks_v2/gapic/transports/cloud_tasks_grpc_transport.py +++ /dev/null @@ -1,428 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.tasks_v2.proto import cloudtasks_pb2_grpc - - -class CloudTasksGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.tasks.v2 CloudTasks API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="cloudtasks.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). 
- if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "cloud_tasks_stub": cloudtasks_pb2_grpc.CloudTasksStub(channel), - } - - @classmethod - def create_channel( - cls, address="cloudtasks.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def list_queues(self): - """Return the gRPC stub for :meth:`CloudTasksClient.list_queues`. - - Lists queues. - - Queues are returned in lexicographical order. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].ListQueues - - @property - def get_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.get_queue`. - - Gets a queue. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].GetQueue - - @property - def create_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.create_queue`. - - Creates a queue. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless - of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].CreateQueue - - @property - def update_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.update_queue`. - - Updates a queue. - - This method creates the queue if it does not exist and updates the queue - if it does exist. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless - of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. 
Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].UpdateQueue - - @property - def delete_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.delete_queue`. - - Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be created - for 7 days. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].DeleteQueue - - @property - def purge_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.purge_queue`. - - Purges a queue by deleting all of its tasks. - - All tasks created before this method is called are permanently deleted. - - Purge operations can take up to one minute to take effect. Tasks - might be dispatched before the purge takes effect. A purge is irreversible. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].PurgeQueue - - @property - def pause_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.pause_queue`. - - Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks until - the queue is resumed via ``ResumeQueue``. Tasks can still be added when - the queue is paused. A queue is paused if its ``state`` is ``PAUSED``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].PauseQueue - - @property - def resume_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.resume_queue`. - - Resume a queue. - - This method resumes a queue after it has been ``PAUSED`` or - ``DISABLED``. The state of a queue is stored in the queue's ``state``; - after calling this method it will be set to ``RUNNING``. - - WARNING: Resuming many high-QPS queues at the same time can lead to - target overloading. If you are resuming high-QPS queues, follow the - 500/50/5 pattern described in `Managing Cloud Tasks Scaling - Risks `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].ResumeQueue - - @property - def get_iam_policy(self): - """Return the gRPC stub for :meth:`CloudTasksClient.get_iam_policy`. - - Gets the access control policy for a ``Queue``. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the specified - resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["cloud_tasks_stub"].GetIamPolicy - - @property - def set_iam_policy(self): - """Return the gRPC stub for :meth:`CloudTasksClient.set_iam_policy`. - - Sets the access control policy for a ``Queue``. Replaces any - existing policy. - - Note: The Cloud Console does not check queue-level IAM permissions yet. - Project-level permissions are required to use the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the specified - resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].SetIamPolicy - - @property - def test_iam_permissions(self): - """Return the gRPC stub for :meth:`CloudTasksClient.test_iam_permissions`. - - Returns permissions that a caller has on a ``Queue``. If the - resource does not exist, this will return an empty set of permissions, - not a ``NOT_FOUND`` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].TestIamPermissions - - @property - def list_tasks(self): - """Return the gRPC stub for :meth:`CloudTasksClient.list_tasks`. - - Lists the tasks in a queue. - - By default, only the ``BASIC`` view is retrieved due to performance - considerations; ``response_view`` controls the subset of information - which is returned. - - The tasks may be returned in any order. The ordering may change at any - time. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].ListTasks - - @property - def get_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.get_task`. - - Gets a task. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].GetTask - - @property - def create_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.create_task`. - - Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask command. - - - The maximum task size is 100KB. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].CreateTask - - @property - def delete_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.delete_task`. - - Deletes a task. - - A task can be deleted if it is scheduled or dispatched. A task - cannot be deleted if it has executed successfully or permanently - failed. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].DeleteTask - - @property - def run_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.run_task`. - - Forces a task to run now. 
- - When this method is called, Cloud Tasks will dispatch the task, even if - the task is already running, the queue has reached its ``RateLimits`` or - is ``PAUSED``. - - This command is meant to be used for manual debugging. For example, - ``RunTask`` can be used to retry a failed task after a fix has been made - or to manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is returned - contains the ``status`` after the task is dispatched but before the task - is received by its target. - - If Cloud Tasks receives a successful response from the task's target, - then the task will be deleted; otherwise the task's ``schedule_time`` - will be reset to the time that ``RunTask`` was called plus the retry - delay specified in the queue's ``RetryConfig``. - - ``RunTask`` returns ``NOT_FOUND`` when it is called on a task that has - already succeeded or permanently failed. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].RunTask diff --git a/google/cloud/tasks_v2/proto/__init__.py b/google/cloud/tasks_v2/proto/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/tasks_v2/proto/cloudtasks_pb2.py b/google/cloud/tasks_v2/proto/cloudtasks_pb2.py deleted file mode 100644 index 5819e162..00000000 --- a/google/cloud/tasks_v2/proto/cloudtasks_pb2.py +++ /dev/null @@ -1,1611 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/tasks_v2/proto/cloudtasks.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.tasks_v2.proto import ( - queue_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2, -) -from google.cloud.tasks_v2.proto import ( - task_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2, -) -from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 -from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/tasks_v2/proto/cloudtasks.proto", - package="google.cloud.tasks.v2", - syntax="proto3", - serialized_options=b"\n\031com.google.cloud.tasks.v2B\017CloudTasksProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks\242\002\005TASKS", - create_key=_descriptor._internal_create_key, - 
serialized_pb=b'\n,google/cloud/tasks_v2/proto/cloudtasks.proto\x12\x15google.cloud.tasks.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\'google/cloud/tasks_v2/proto/queue.proto\x1a&google/cloud/tasks_v2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x83\x01\n\x11ListQueuesRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"[\n\x12ListQueuesResponse\x12,\n\x06queues\x18\x01 \x03(\x0b\x32\x1c.google.cloud.tasks.v2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x7f\n\x12\x43reateQueueRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x30\n\x05queue\x18\x02 \x01(\x0b\x32\x1c.google.cloud.tasks.v2.QueueB\x03\xe0\x41\x02"w\n\x12UpdateQueueRequest\x12\x30\n\x05queue\x18\x01 \x01(\x0b\x32\x1c.google.cloud.tasks.v2.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xaa\x01\n\x10ListTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"X\n\x11ListTasksResponse\x12*\n\x05tasks\x18\x01 \x03(\x0b\x32\x1b.google.cloud.tasks.v2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x7f\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"\xb4\x01\n\x11\x43reateTaskRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12.\n\x04task\x18\x02 \x01(\x0b\x32\x1b.google.cloud.tasks.v2.TaskB\x03\xe0\x41\x02\x12\x37\n\rresponse_view\x18\x03 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\x7f\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 
.google.cloud.tasks.v2.Task.View2\xdd\x14\n\nCloudTasks\x12\x9e\x01\n\nListQueues\x12(.google.cloud.tasks.v2.ListQueuesRequest\x1a).google.cloud.tasks.v2.ListQueuesResponse";\x82\xd3\xe4\x93\x02,\x12*/v2/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x8b\x01\n\x08GetQueue\x12&.google.cloud.tasks.v2.GetQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"9\x82\xd3\xe4\x93\x02,\x12*/v2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa0\x01\n\x0b\x43reateQueue\x12).google.cloud.tasks.v2.CreateQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"H\x82\xd3\xe4\x93\x02\x33"*/v2/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xab\x01\n\x0bUpdateQueue\x12).google.cloud.tasks.v2.UpdateQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"S\x82\xd3\xe4\x93\x02\x39\x32\x30/v2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x8b\x01\n\x0b\x44\x65leteQueue\x12).google.cloud.tasks.v2.DeleteQueueRequest\x1a\x16.google.protobuf.Empty"9\x82\xd3\xe4\x93\x02,**/v2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\x98\x01\n\nPurgeQueue\x12(.google.cloud.tasks.v2.PurgeQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"B\x82\xd3\xe4\x93\x02\x35"0/v2/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\x98\x01\n\nPauseQueue\x12(.google.cloud.tasks.v2.PauseQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"B\x82\xd3\xe4\x93\x02\x35"0/v2/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\x9b\x01\n\x0bResumeQueue\x12).google.cloud.tasks.v2.ResumeQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"C\x82\xd3\xe4\x93\x02\x36"1/v2/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\x9c\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"Q\x82\xd3\xe4\x93\x02@";/v2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\xda\x41\x08resource\x12\xa3\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"X\x82\xd3\xe4\x93\x02@";/v2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xce\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"c\x82\xd3\xe4\x93\x02\x46"A/v2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xa3\x01\n\tListTasks\x12\'.google.cloud.tasks.v2.ListTasksRequest\x1a(.google.cloud.tasks.v2.ListTasksResponse"C\x82\xd3\xe4\x93\x02\x34\x12\x32/v2/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x90\x01\n\x07GetTask\x12%.google.cloud.tasks.v2.GetTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"A\x82\xd3\xe4\x93\x02\x34\x12\x32/v2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xa0\x01\n\nCreateTask\x12(.google.cloud.tasks.v2.CreateTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"K\x82\xd3\xe4\x93\x02\x37"2/v2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x91\x01\n\nDeleteTask\x12(.google.cloud.tasks.v2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"A\x82\xd3\xe4\x93\x02\x34*2/v2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\x97\x01\n\x07RunTask\x12%.google.cloud.tasks.v2.RunTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"H\x82\xd3\xe4\x93\x02;"6/v2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBr\n\x19\x63om.google.cloud.t
asks.v2B\x0f\x43loudTasksProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks\xa2\x02\x05TASKSb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.DESCRIPTOR, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.DESCRIPTOR, - google_dot_iam_dot_v1_dot_iam__policy__pb2.DESCRIPTOR, - google_dot_iam_dot_v1_dot_policy__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - ], -) - - -_LISTQUEUESREQUEST = _descriptor.Descriptor( - name="ListQueuesRequest", - full_name="google.cloud.tasks.v2.ListQueuesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.tasks.v2.ListQueuesRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.tasks.v2.ListQueuesRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.tasks.v2.ListQueuesRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.tasks.v2.ListQueuesRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=391, - serialized_end=522, -) - - -_LISTQUEUESRESPONSE = _descriptor.Descriptor( - name="ListQueuesResponse", - full_name="google.cloud.tasks.v2.ListQueuesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="queues", - full_name="google.cloud.tasks.v2.ListQueuesResponse.queues", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.tasks.v2.ListQueuesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=524, - serialized_end=615, -) - - -_GETQUEUEREQUEST = _descriptor.Descriptor( - name="GetQueueRequest", - full_name="google.cloud.tasks.v2.GetQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2.GetQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=617, - serialized_end=689, -) - - -_CREATEQUEUEREQUEST = _descriptor.Descriptor( - name="CreateQueueRequest", - full_name="google.cloud.tasks.v2.CreateQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.tasks.v2.CreateQueueRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="queue", - full_name="google.cloud.tasks.v2.CreateQueueRequest.queue", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=691, - serialized_end=818, -) - - -_UPDATEQUEUEREQUEST = _descriptor.Descriptor( - name="UpdateQueueRequest", - full_name="google.cloud.tasks.v2.UpdateQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="queue", - full_name="google.cloud.tasks.v2.UpdateQueueRequest.queue", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - 
default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.tasks.v2.UpdateQueueRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=820, - serialized_end=939, -) - - -_DELETEQUEUEREQUEST = _descriptor.Descriptor( - name="DeleteQueueRequest", - full_name="google.cloud.tasks.v2.DeleteQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2.DeleteQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=941, - serialized_end=1016, -) - - -_PURGEQUEUEREQUEST = _descriptor.Descriptor( - name="PurgeQueueRequest", - full_name="google.cloud.tasks.v2.PurgeQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2.PurgeQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1018, - serialized_end=1092, -) - - -_PAUSEQUEUEREQUEST = _descriptor.Descriptor( - name="PauseQueueRequest", - full_name="google.cloud.tasks.v2.PauseQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2.PauseQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - 
nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1094, - serialized_end=1168, -) - - -_RESUMEQUEUEREQUEST = _descriptor.Descriptor( - name="ResumeQueueRequest", - full_name="google.cloud.tasks.v2.ResumeQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2.ResumeQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1170, - serialized_end=1245, -) - - -_LISTTASKSREQUEST = _descriptor.Descriptor( - name="ListTasksRequest", - full_name="google.cloud.tasks.v2.ListTasksRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.tasks.v2.ListTasksRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \022\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2.ListTasksRequest.response_view", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.tasks.v2.ListTasksRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.tasks.v2.ListTasksRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1248, - serialized_end=1418, -) - - -_LISTTASKSRESPONSE = _descriptor.Descriptor( - name="ListTasksResponse", - full_name="google.cloud.tasks.v2.ListTasksResponse", - filename=None, 
- file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="tasks", - full_name="google.cloud.tasks.v2.ListTasksResponse.tasks", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.tasks.v2.ListTasksResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1420, - serialized_end=1508, -) - - -_GETTASKREQUEST = _descriptor.Descriptor( - name="GetTaskRequest", - full_name="google.cloud.tasks.v2.GetTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2.GetTaskRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2.GetTaskRequest.response_view", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1510, - serialized_end=1637, -) - - -_CREATETASKREQUEST = _descriptor.Descriptor( - name="CreateTaskRequest", - full_name="google.cloud.tasks.v2.CreateTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.tasks.v2.CreateTaskRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \022\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="task", - full_name="google.cloud.tasks.v2.CreateTaskRequest.task", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2.CreateTaskRequest.response_view", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1640, - serialized_end=1820, -) - - -_DELETETASKREQUEST = _descriptor.Descriptor( - name="DeleteTaskRequest", - full_name="google.cloud.tasks.v2.DeleteTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2.DeleteTaskRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1822, - serialized_end=1895, -) - - -_RUNTASKREQUEST = _descriptor.Descriptor( - name="RunTaskRequest", - full_name="google.cloud.tasks.v2.RunTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2.RunTaskRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2.RunTaskRequest.response_view", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1897, - serialized_end=2024, -) - -_LISTQUEUESRESPONSE.fields_by_name[ - "queues" -].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE -_CREATEQUEUEREQUEST.fields_by_name[ - "queue" -].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE -_UPDATEQUEUEREQUEST.fields_by_name[ - "queue" -].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE 
-_UPDATEQUEUEREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTTASKSREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK_VIEW -_LISTTASKSRESPONSE.fields_by_name[ - "tasks" -].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK -_GETTASKREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK_VIEW -_CREATETASKREQUEST.fields_by_name[ - "task" -].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK -_CREATETASKREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK_VIEW -_RUNTASKREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK_VIEW -DESCRIPTOR.message_types_by_name["ListQueuesRequest"] = _LISTQUEUESREQUEST -DESCRIPTOR.message_types_by_name["ListQueuesResponse"] = _LISTQUEUESRESPONSE -DESCRIPTOR.message_types_by_name["GetQueueRequest"] = _GETQUEUEREQUEST -DESCRIPTOR.message_types_by_name["CreateQueueRequest"] = _CREATEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["UpdateQueueRequest"] = _UPDATEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["DeleteQueueRequest"] = _DELETEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["PurgeQueueRequest"] = _PURGEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["PauseQueueRequest"] = _PAUSEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["ResumeQueueRequest"] = _RESUMEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["ListTasksRequest"] = _LISTTASKSREQUEST -DESCRIPTOR.message_types_by_name["ListTasksResponse"] = _LISTTASKSRESPONSE -DESCRIPTOR.message_types_by_name["GetTaskRequest"] = _GETTASKREQUEST -DESCRIPTOR.message_types_by_name["CreateTaskRequest"] = _CREATETASKREQUEST -DESCRIPTOR.message_types_by_name["DeleteTaskRequest"] = _DELETETASKREQUEST -DESCRIPTOR.message_types_by_name["RunTaskRequest"] = _RUNTASKREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ListQueuesRequest = _reflection.GeneratedProtocolMessageType( - "ListQueuesRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTQUEUESREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. - - Attributes: - parent: - Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - filter: - \ ``filter`` can be used to specify a subset of queues. Any - [Queue][google.cloud.tasks.v2.Queue] field can be used as a - filter and several operators as supported. For example: ``<=, - <, >=, >, !=, =, :``. The filter syntax is the same as - described in `Stackdriver’s Advanced Logs Filters `_. Sample - filter “state: PAUSED”. Note that using filters might cause - fewer queues than the requested page_size to be returned. - page_size: - Requested page size. The maximum page size is 9800. If - unspecified, the page size will be the maximum. Fewer queues - than requested might be returned, even if more queues exist; - use the [next_page_token][google.cloud.tasks.v2.ListQueuesResp - onse.next_page_token] in the response to determine if more - queues exist. - page_token: - A token identifying the page of results to return. To request - the first page results, page_token must be empty. 
To request - the next page of results, page_token must be the value of [nex - t_page_token][google.cloud.tasks.v2.ListQueuesResponse.next_pa - ge_token] returned from the previous call to - [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] - method. It is an error to switch the value of the - [filter][google.cloud.tasks.v2.ListQueuesRequest.filter] while - iterating through pages. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.ListQueuesRequest) - }, -) -_sym_db.RegisterMessage(ListQueuesRequest) - -ListQueuesResponse = _reflection.GeneratedProtocolMessageType( - "ListQueuesResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTQUEUESRESPONSE, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Response message for - [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. - - Attributes: - queues: - The list of queues. - next_page_token: - A token to retrieve next page of results. To return the next - page of results, call - [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] with - this value as the [page_token][google.cloud.tasks.v2.ListQueue - sRequest.page_token]. If the next_page_token is empty, there - are no more results. The page token is valid for only 2 - hours. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.ListQueuesResponse) - }, -) -_sym_db.RegisterMessage(ListQueuesResponse) - -GetQueueRequest = _reflection.GeneratedProtocolMessageType( - "GetQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. - - Attributes: - name: - Required. The resource name of the queue. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.GetQueueRequest) - }, -) -_sym_db.RegisterMessage(GetQueueRequest) - -CreateQueueRequest = _reflection.GeneratedProtocolMessageType( - "CreateQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. - - Attributes: - parent: - Required. The location name in which the queue will be - created. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` The list of - allowed locations can be obtained by calling Cloud Tasks’ - implementation of [ListLocations][google.cloud.location.Locati - ons.ListLocations]. - queue: - Required. The queue to create. [Queue’s - name][google.cloud.tasks.v2.Queue.name] cannot be the same as - an existing queue. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.CreateQueueRequest) - }, -) -_sym_db.RegisterMessage(CreateQueueRequest) - -UpdateQueueRequest = _reflection.GeneratedProtocolMessageType( - "UpdateQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. - - Attributes: - queue: - Required. The queue to create or update. The queue’s - [name][google.cloud.tasks.v2.Queue.name] must be specified. - Output only fields cannot be modified using UpdateQueue. Any - value specified for an output only field will be ignored. The - queue’s [name][google.cloud.tasks.v2.Queue.name] cannot be - changed. 
- update_mask: - A mask used to specify which fields of the queue are being - updated. If empty, then all fields will be updated. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.UpdateQueueRequest) - }, -) -_sym_db.RegisterMessage(UpdateQueueRequest) - -DeleteQueueRequest = _reflection.GeneratedProtocolMessageType( - "DeleteQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. - - Attributes: - name: - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.DeleteQueueRequest) - }, -) -_sym_db.RegisterMessage(DeleteQueueRequest) - -PurgeQueueRequest = _reflection.GeneratedProtocolMessageType( - "PurgeQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _PURGEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. - - Attributes: - name: - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.PurgeQueueRequest) - }, -) -_sym_db.RegisterMessage(PurgeQueueRequest) - -PauseQueueRequest = _reflection.GeneratedProtocolMessageType( - "PauseQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _PAUSEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. - - Attributes: - name: - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.PauseQueueRequest) - }, -) -_sym_db.RegisterMessage(PauseQueueRequest) - -ResumeQueueRequest = _reflection.GeneratedProtocolMessageType( - "ResumeQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _RESUMEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. - - Attributes: - name: - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.ResumeQueueRequest) - }, -) -_sym_db.RegisterMessage(ResumeQueueRequest) - -ListTasksRequest = _reflection.GeneratedProtocolMessageType( - "ListTasksRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTASKSREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for listing tasks using - [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. - - Attributes: - parent: - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. 
Authorization for - [FULL][google.cloud.tasks.v2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2.Task] resource. - page_size: - Maximum page size. Fewer tasks than requested might be - returned, even if more tasks exist; use [next_page_token][goog - le.cloud.tasks.v2.ListTasksResponse.next_page_token] in the - response to determine if more tasks exist. The maximum page - size is 1000. If unspecified, the page size will be the - maximum. - page_token: - A token identifying the page of results to return. To request - the first page results, page_token must be empty. To request - the next page of results, page_token must be the value of [nex - t_page_token][google.cloud.tasks.v2.ListTasksResponse.next_pag - e_token] returned from the previous call to - [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] - method. The page token is valid for only 2 hours. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.ListTasksRequest) - }, -) -_sym_db.RegisterMessage(ListTasksRequest) - -ListTasksResponse = _reflection.GeneratedProtocolMessageType( - "ListTasksResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTASKSRESPONSE, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Response message for listing tasks using - [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. - - Attributes: - tasks: - The list of tasks. - next_page_token: - A token to retrieve next page of results. To return the next - page of results, call - [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] with - this value as the [page_token][google.cloud.tasks.v2.ListTasks - Request.page_token]. If the next_page_token is empty, there - are no more results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.ListTasksResponse) - }, -) -_sym_db.RegisterMessage(ListTasksResponse) - -GetTaskRequest = _reflection.GeneratedProtocolMessageType( - "GetTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETTASKREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for getting a task using - [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. - - Attributes: - name: - Required. The task name. For example: ``projects/PROJECT_ID/lo - cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2.Task] resource. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.GetTaskRequest) - }, -) -_sym_db.RegisterMessage(GetTaskRequest) - -CreateTaskRequest = _reflection.GeneratedProtocolMessageType( - "CreateTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATETASKREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. - - Attributes: - parent: - Required. The queue name. 
For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - The queue must already exist. - task: - Required. The task to add. Task names have the following - format: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUE - UE_ID/tasks/TASK_ID``. The user can optionally specify a task - [name][google.cloud.tasks.v2.Task.name]. If a name is not - specified then the system will generate a random unique task - id, which will be set in the task returned in the - [response][google.cloud.tasks.v2.Task.name]. If - [schedule_time][google.cloud.tasks.v2.Task.schedule_time] is - not set or is in the past then Cloud Tasks will set it to the - current time. Task De-duplication: Explicitly specifying a - task ID enables task de-duplication. If a task’s ID is - identical to that of an existing task or a task that was - deleted or executed recently then the call will fail with - [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the - task’s queue was created using Cloud Tasks, then another task - with the same name can’t be created for ~1hour after the - original task was deleted or executed. If the task’s queue was - created using queue.yaml or queue.xml, then another task with - the same name can’t be created for ~9days after the original - task was deleted or executed. Because there is an extra - lookup cost to identify duplicate task names, these - [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] - calls have significantly increased latency. Using hashed - strings for the task id or for the prefix of the task id is - recommended. Choosing task ids that are sequential or have - sequential prefixes, for example using a timestamp, causes an - increase in latency and error rates in all task commands. The - infrastructure relies on an approximately uniform distribution - of task ids to store and serve tasks efficiently. - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2.Task] resource. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.CreateTaskRequest) - }, -) -_sym_db.RegisterMessage(CreateTaskRequest) - -DeleteTaskRequest = _reflection.GeneratedProtocolMessageType( - "DeleteTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETETASKREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for deleting a task using - [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. - - Attributes: - name: - Required. The task name. 
For example: ``projects/PROJECT_ID/lo - cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.DeleteTaskRequest) - }, -) -_sym_db.RegisterMessage(DeleteTaskRequest) - -RunTaskRequest = _reflection.GeneratedProtocolMessageType( - "RunTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _RUNTASKREQUEST, - "__module__": "google.cloud.tasks_v2.proto.cloudtasks_pb2", - "__doc__": """Request message for forcing a task to run now using - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. - - Attributes: - name: - Required. The task name. For example: ``projects/PROJECT_ID/lo - cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2.Task] resource. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.RunTaskRequest) - }, -) -_sym_db.RegisterMessage(RunTaskRequest) - - -DESCRIPTOR._options = None -_LISTQUEUESREQUEST.fields_by_name["parent"]._options = None -_GETQUEUEREQUEST.fields_by_name["name"]._options = None -_CREATEQUEUEREQUEST.fields_by_name["parent"]._options = None -_CREATEQUEUEREQUEST.fields_by_name["queue"]._options = None -_UPDATEQUEUEREQUEST.fields_by_name["queue"]._options = None -_DELETEQUEUEREQUEST.fields_by_name["name"]._options = None -_PURGEQUEUEREQUEST.fields_by_name["name"]._options = None -_PAUSEQUEUEREQUEST.fields_by_name["name"]._options = None -_RESUMEQUEUEREQUEST.fields_by_name["name"]._options = None -_LISTTASKSREQUEST.fields_by_name["parent"]._options = None -_GETTASKREQUEST.fields_by_name["name"]._options = None -_CREATETASKREQUEST.fields_by_name["parent"]._options = None -_CREATETASKREQUEST.fields_by_name["task"]._options = None -_DELETETASKREQUEST.fields_by_name["name"]._options = None -_RUNTASKREQUEST.fields_by_name["name"]._options = None - -_CLOUDTASKS = _descriptor.ServiceDescriptor( - name="CloudTasks", - full_name="google.cloud.tasks.v2.CloudTasks", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A\031cloudtasks.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform", - create_key=_descriptor._internal_create_key, - serialized_start=2027, - serialized_end=4680, - methods=[ - _descriptor.MethodDescriptor( - name="ListQueues", - full_name="google.cloud.tasks.v2.CloudTasks.ListQueues", - index=0, - containing_service=None, - input_type=_LISTQUEUESREQUEST, - output_type=_LISTQUEUESRESPONSE, - serialized_options=b"\202\323\344\223\002,\022*/v2/{parent=projects/*/locations/*}/queues\332A\006parent", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetQueue", - full_name="google.cloud.tasks.v2.CloudTasks.GetQueue", - index=1, - containing_service=None, - input_type=_GETQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b"\202\323\344\223\002,\022*/v2/{name=projects/*/locations/*/queues/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.MethodDescriptor( - name="CreateQueue", - full_name="google.cloud.tasks.v2.CloudTasks.CreateQueue", - index=2, - containing_service=None, - input_type=_CREATEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b'\202\323\344\223\0023"*/v2/{parent=projects/*/locations/*}/queues:\005queue\332A\014parent,queue', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateQueue", - full_name="google.cloud.tasks.v2.CloudTasks.UpdateQueue", - index=3, - containing_service=None, - input_type=_UPDATEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b"\202\323\344\223\002920/v2/{queue.name=projects/*/locations/*/queues/*}:\005queue\332A\021queue,update_mask", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteQueue", - full_name="google.cloud.tasks.v2.CloudTasks.DeleteQueue", - index=4, - containing_service=None, - input_type=_DELETEQUEUEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002,**/v2/{name=projects/*/locations/*/queues/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="PurgeQueue", - full_name="google.cloud.tasks.v2.CloudTasks.PurgeQueue", - index=5, - containing_service=None, - input_type=_PURGEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b'\202\323\344\223\0025"0/v2/{name=projects/*/locations/*/queues/*}:purge:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="PauseQueue", - full_name="google.cloud.tasks.v2.CloudTasks.PauseQueue", - index=6, - containing_service=None, - input_type=_PAUSEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b'\202\323\344\223\0025"0/v2/{name=projects/*/locations/*/queues/*}:pause:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ResumeQueue", - full_name="google.cloud.tasks.v2.CloudTasks.ResumeQueue", - index=7, - containing_service=None, - input_type=_RESUMEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b'\202\323\344\223\0026"1/v2/{name=projects/*/locations/*/queues/*}:resume:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetIamPolicy", - full_name="google.cloud.tasks.v2.CloudTasks.GetIamPolicy", - index=8, - containing_service=None, - input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._GETIAMPOLICYREQUEST, - output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, - serialized_options=b'\202\323\344\223\002@";/v2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\001*\332A\010resource', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="SetIamPolicy", - full_name="google.cloud.tasks.v2.CloudTasks.SetIamPolicy", - index=9, - containing_service=None, - input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._SETIAMPOLICYREQUEST, - output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, - serialized_options=b'\202\323\344\223\002@";/v2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\001*\332A\017resource,policy', - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.MethodDescriptor( - name="TestIamPermissions", - full_name="google.cloud.tasks.v2.CloudTasks.TestIamPermissions", - index=10, - containing_service=None, - input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSREQUEST, - output_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSRESPONSE, - serialized_options=b'\202\323\344\223\002F"A/v2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\001*\332A\024resource,permissions', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTasks", - full_name="google.cloud.tasks.v2.CloudTasks.ListTasks", - index=11, - containing_service=None, - input_type=_LISTTASKSREQUEST, - output_type=_LISTTASKSRESPONSE, - serialized_options=b"\202\323\344\223\0024\0222/v2/{parent=projects/*/locations/*/queues/*}/tasks\332A\006parent", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetTask", - full_name="google.cloud.tasks.v2.CloudTasks.GetTask", - index=12, - containing_service=None, - input_type=_GETTASKREQUEST, - output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK, - serialized_options=b"\202\323\344\223\0024\0222/v2/{name=projects/*/locations/*/queues/*/tasks/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CreateTask", - full_name="google.cloud.tasks.v2.CloudTasks.CreateTask", - index=13, - containing_service=None, - input_type=_CREATETASKREQUEST, - output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK, - serialized_options=b'\202\323\344\223\0027"2/v2/{parent=projects/*/locations/*/queues/*}/tasks:\001*\332A\013parent,task', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteTask", - full_name="google.cloud.tasks.v2.CloudTasks.DeleteTask", - index=14, - containing_service=None, - input_type=_DELETETASKREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\0024*2/v2/{name=projects/*/locations/*/queues/*/tasks/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="RunTask", - full_name="google.cloud.tasks.v2.CloudTasks.RunTask", - index=15, - containing_service=None, - input_type=_RUNTASKREQUEST, - output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK, - serialized_options=b'\202\323\344\223\002;"6/v2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_CLOUDTASKS) - -DESCRIPTOR.services_by_name["CloudTasks"] = _CLOUDTASKS - -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/tasks_v2/proto/cloudtasks_pb2_grpc.py b/google/cloud/tasks_v2/proto/cloudtasks_pb2_grpc.py deleted file mode 100644 index 838e3b80..00000000 --- a/google/cloud/tasks_v2/proto/cloudtasks_pb2_grpc.py +++ /dev/null @@ -1,880 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from google.cloud.tasks_v2.proto import ( - cloudtasks_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2, -) -from google.cloud.tasks_v2.proto import ( - queue_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2, -) -from google.cloud.tasks_v2.proto import ( - task_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2, -) -from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 -from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class CloudTasksStub(object): - """Cloud Tasks allows developers to manage the execution of background - work in their applications. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.ListQueues = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/ListQueues", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.FromString, - ) - self.GetQueue = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/GetQueue", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.CreateQueue = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/CreateQueue", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.UpdateQueue = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/UpdateQueue", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.DeleteQueue = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/DeleteQueue", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.PurgeQueue = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/PurgeQueue", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.PauseQueue = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/PauseQueue", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.ResumeQueue = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/ResumeQueue", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.GetIamPolicy = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/GetIamPolicy", - 
request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, - ) - self.SetIamPolicy = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/SetIamPolicy", - request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, - ) - self.TestIamPermissions = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/TestIamPermissions", - request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString, - ) - self.ListTasks = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/ListTasks", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.FromString, - ) - self.GetTask = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/GetTask", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.FromString, - ) - self.CreateTask = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/CreateTask", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.FromString, - ) - self.DeleteTask = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/DeleteTask", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.RunTask = channel.unary_unary( - "/google.cloud.tasks.v2.CloudTasks/RunTask", - request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.FromString, - ) - - -class CloudTasksServicer(object): - """Cloud Tasks allows developers to manage the execution of background - work in their applications. - """ - - def ListQueues(self, request, context): - """Lists queues. - - Queues are returned in lexicographical order. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetQueue(self, request, context): - """Gets a queue. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateQueue(self, request, context): - """Creates a queue. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless of whether - it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. - Read - [Overview of Queue Management and - queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using - this method. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateQueue(self, request, context): - """Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless of whether - it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. - Read - [Overview of Queue Management and - queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using - this method. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteQueue(self, request, context): - """Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be created - for 7 days. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. - Read - [Overview of Queue Management and - queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using - this method. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def PurgeQueue(self, request, context): - """Purges a queue by deleting all of its tasks. - - All tasks created before this method is called are permanently deleted. - - Purge operations can take up to one minute to take effect. Tasks - might be dispatched before the purge takes effect. A purge is irreversible. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def PauseQueue(self, request, context): - """Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. Tasks can still be added - when the queue is paused. A queue is paused if its - [state][google.cloud.tasks.v2.Queue.state] is [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ResumeQueue(self, request, context): - """Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The state of a queue is stored - in the queue's [state][google.cloud.tasks.v2.Queue.state]; after calling this method it - will be set to [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can - lead to target overloading. If you are resuming high-QPS - queues, follow the 500/50/5 pattern described in - [Managing Cloud Tasks Scaling - Risks](https://cloud.google.com/tasks/docs/manage-cloud-task-scaling). 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetIamPolicy(self, request, context): - """Gets the access control policy for a [Queue][google.cloud.tasks.v2.Queue]. - Returns an empty policy if the resource exists and does not have a policy - set. - - Authorization requires the following - [Google IAM](https://cloud.google.com/iam) permission on the specified - resource parent: - - * `cloudtasks.queues.getIamPolicy` - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def SetIamPolicy(self, request, context): - """Sets the access control policy for a [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM permissions yet. - Project-level permissions are required to use the Cloud Console. - - Authorization requires the following - [Google IAM](https://cloud.google.com/iam) permission on the specified - resource parent: - - * `cloudtasks.queues.setIamPolicy` - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def TestIamPermissions(self, request, context): - """Returns permissions that a caller has on a [Queue][google.cloud.tasks.v2.Queue]. - If the resource does not exist, this will return an empty set of - permissions, not a [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building permission-aware - UIs and command-line tools, not for authorization checking. This operation - may "fail open" without warning. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTasks(self, request, context): - """Lists the tasks in a queue. - - By default, only the [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved - due to performance considerations; - [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] controls the - subset of information which is returned. - - The tasks may be returned in any order. The ordering may change at any - time. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetTask(self, request, context): - """Gets a task. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateTask(self, request, context): - """Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask command. - - * The maximum task size is 100KB. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteTask(self, request, context): - """Deletes a task. - - A task can be deleted if it is scheduled or dispatched. A task - cannot be deleted if it has executed successfully or permanently - failed. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def RunTask(self, request, context): - """Forces a task to run now. 
- - When this method is called, Cloud Tasks will dispatch the task, even if - the task is already running, the queue has reached its [RateLimits][google.cloud.tasks.v2.RateLimits] or - is [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. For - example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can be used to retry a failed - task after a fix has been made or to manually force a task to be - dispatched now. - - The dispatched task is returned. That is, the task that is returned - contains the [status][Task.status] after the task is dispatched but - before the task is received by its target. - - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will be reset to the time that - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called plus the retry delay specified - in the queue's [RetryConfig][google.cloud.tasks.v2.RetryConfig]. - - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_CloudTasksServicer_to_server(servicer, server): - rpc_method_handlers = { - "ListQueues": grpc.unary_unary_rpc_method_handler( - servicer.ListQueues, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.SerializeToString, - ), - "GetQueue": grpc.unary_unary_rpc_method_handler( - servicer.GetQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "CreateQueue": grpc.unary_unary_rpc_method_handler( - servicer.CreateQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "UpdateQueue": grpc.unary_unary_rpc_method_handler( - servicer.UpdateQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "DeleteQueue": grpc.unary_unary_rpc_method_handler( - servicer.DeleteQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "PurgeQueue": grpc.unary_unary_rpc_method_handler( - servicer.PurgeQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "PauseQueue": grpc.unary_unary_rpc_method_handler( - servicer.PauseQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.FromString, - 
response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "ResumeQueue": grpc.unary_unary_rpc_method_handler( - servicer.ResumeQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "GetIamPolicy": grpc.unary_unary_rpc_method_handler( - servicer.GetIamPolicy, - request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString, - response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, - ), - "SetIamPolicy": grpc.unary_unary_rpc_method_handler( - servicer.SetIamPolicy, - request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString, - response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, - ), - "TestIamPermissions": grpc.unary_unary_rpc_method_handler( - servicer.TestIamPermissions, - request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString, - response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString, - ), - "ListTasks": grpc.unary_unary_rpc_method_handler( - servicer.ListTasks, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.SerializeToString, - ), - "GetTask": grpc.unary_unary_rpc_method_handler( - servicer.GetTask, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.SerializeToString, - ), - "CreateTask": grpc.unary_unary_rpc_method_handler( - servicer.CreateTask, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.SerializeToString, - ), - "DeleteTask": grpc.unary_unary_rpc_method_handler( - servicer.DeleteTask, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "RunTask": grpc.unary_unary_rpc_method_handler( - servicer.RunTask, - request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.tasks.v2.CloudTasks", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. -class CloudTasks(object): - """Cloud Tasks allows developers to manage the execution of background - work in their applications. 
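add_CloudTasksServicer_to_server above is also what made the base servicer usable as a local fake in tests: subclass CloudTasksServicer, override only the RPCs the test exercises (everything else keeps raising UNIMPLEMENTED), and register the instance on an in-process server. A sketch under that assumption; the in-memory dict is hypothetical test scaffolding, not library behaviour:

from concurrent import futures

import grpc

from google.cloud.tasks_v2.proto import cloudtasks_pb2_grpc, queue_pb2


class FakeCloudTasks(cloudtasks_pb2_grpc.CloudTasksServicer):
    def __init__(self):
        self._queues = {}  # name -> Queue, purely in-memory

    def CreateQueue(self, request, context):
        queue = queue_pb2.Queue()
        queue.CopyFrom(request.queue)
        self._queues[queue.name] = queue
        return queue

    def GetQueue(self, request, context):
        if request.name not in self._queues:
            context.abort(grpc.StatusCode.NOT_FOUND, "queue not found")
        return self._queues[request.name]


server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
cloudtasks_pb2_grpc.add_CloudTasksServicer_to_server(FakeCloudTasks(), server)
port = server.add_insecure_port("localhost:0")
server.start()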
- """ - - @staticmethod - def ListQueues( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/ListQueues", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/GetQueue", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/CreateQueue", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/UpdateQueue", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/DeleteQueue", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def PurgeQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/PurgeQueue", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString, - options, - 
channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def PauseQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/PauseQueue", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ResumeQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/ResumeQueue", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetIamPolicy( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/GetIamPolicy", - google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString, - google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def SetIamPolicy( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/SetIamPolicy", - google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString, - google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def TestIamPermissions( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/TestIamPermissions", - google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString, - google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTasks( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/ListTasks", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.SerializeToString, - 
google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/GetTask", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/CreateTask", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/DeleteTask", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def RunTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2.CloudTasks/RunTask", - google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) diff --git a/google/cloud/tasks_v2/proto/queue_pb2.py b/google/cloud/tasks_v2/proto/queue_pb2.py deleted file mode 100644 index c74256b7..00000000 --- a/google/cloud/tasks_v2/proto/queue_pb2.py +++ /dev/null @@ -1,799 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
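The queue_pb2 module being removed here defined the descriptor-based Queue, RateLimits, RetryConfig, and StackdriverLoggingConfig classes. In this generation style the messages are ordinary protobuf classes with nested enums exposed on the message; a brief sketch (resource IDs are placeholders):

from google.cloud.tasks_v2.proto import queue_pb2

# Resource names follow the pattern declared in the descriptor options below:
# projects/{project}/locations/{location}/queues/{queue}
queue = queue_pb2.Queue(
    name="projects/my-project/locations/us-central1/queues/my-queue",
    rate_limits=queue_pb2.RateLimits(max_dispatches_per_second=5.0),
)

queue.state = queue_pb2.Queue.RUNNING
print(queue_pb2.Queue.State.Name(queue.state))  # "RUNNING"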
-# source: google/cloud/tasks_v2/proto/queue.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.tasks_v2.proto import ( - target_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/tasks_v2/proto/queue.proto", - package="google.cloud.tasks.v2", - syntax="proto3", - serialized_options=b"\n\031com.google.cloud.tasks.v2B\nQueueProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n\'google/cloud/tasks_v2/proto/queue.proto\x12\x15google.cloud.tasks.v2\x1a\x19google/api/resource.proto\x1a(google/cloud/tasks_v2/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\xb2\x04\n\x05Queue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12L\n\x1b\x61pp_engine_routing_override\x18\x02 \x01(\x0b\x32\'.google.cloud.tasks.v2.AppEngineRouting\x12\x36\n\x0brate_limits\x18\x03 \x01(\x0b\x32!.google.cloud.tasks.v2.RateLimits\x12\x38\n\x0cretry_config\x18\x04 \x01(\x0b\x32".google.cloud.tasks.v2.RetryConfig\x12\x31\n\x05state\x18\x05 \x01(\x0e\x32".google.cloud.tasks.v2.Queue.State\x12.\n\npurge_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12S\n\x1astackdriver_logging_config\x18\t \x01(\x0b\x32/.google.cloud.tasks.v2.StackdriverLoggingConfig"E\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03:\\\xea\x41Y\n\x1f\x63loudtasks.googleapis.com/Queue\x12\x36projects/{project}/locations/{location}/queues/{queue}"j\n\nRateLimits\x12!\n\x19max_dispatches_per_second\x18\x01 \x01(\x01\x12\x16\n\x0emax_burst_size\x18\x02 \x01(\x05\x12!\n\x19max_concurrent_dispatches\x18\x03 \x01(\x05"\xd1\x01\n\x0bRetryConfig\x12\x14\n\x0cmax_attempts\x18\x01 \x01(\x05\x12\x35\n\x12max_retry_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12.\n\x0bmin_backoff\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12.\n\x0bmax_backoff\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x05 \x01(\x05"2\n\x18StackdriverLoggingConfig\x12\x16\n\x0esampling_ratio\x18\x01 \x01(\x01\x42\x65\n\x19\x63om.google.cloud.tasks.v2B\nQueueProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3', - dependencies=[ - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_QUEUE_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.cloud.tasks.v2.Queue.State", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - 
name="STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PAUSED", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DISABLED", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=630, - serialized_end=699, -) -_sym_db.RegisterEnumDescriptor(_QUEUE_STATE) - - -_QUEUE = _descriptor.Descriptor( - name="Queue", - full_name="google.cloud.tasks.v2.Queue", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2.Queue.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="app_engine_routing_override", - full_name="google.cloud.tasks.v2.Queue.app_engine_routing_override", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="rate_limits", - full_name="google.cloud.tasks.v2.Queue.rate_limits", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="retry_config", - full_name="google.cloud.tasks.v2.Queue.retry_config", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.tasks.v2.Queue.state", - index=4, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="purge_time", - full_name="google.cloud.tasks.v2.Queue.purge_time", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="stackdriver_logging_config", - full_name="google.cloud.tasks.v2.Queue.stackdriver_logging_config", - index=6, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_QUEUE_STATE,], - serialized_options=b"\352AY\n\037cloudtasks.googleapis.com/Queue\0226projects/{project}/locations/{location}/queues/{queue}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=231, - serialized_end=793, -) - - -_RATELIMITS = _descriptor.Descriptor( - name="RateLimits", - full_name="google.cloud.tasks.v2.RateLimits", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="max_dispatches_per_second", - full_name="google.cloud.tasks.v2.RateLimits.max_dispatches_per_second", - index=0, - number=1, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_burst_size", - full_name="google.cloud.tasks.v2.RateLimits.max_burst_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_concurrent_dispatches", - full_name="google.cloud.tasks.v2.RateLimits.max_concurrent_dispatches", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=795, - serialized_end=901, -) - - -_RETRYCONFIG = _descriptor.Descriptor( - name="RetryConfig", - full_name="google.cloud.tasks.v2.RetryConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="max_attempts", - full_name="google.cloud.tasks.v2.RetryConfig.max_attempts", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_retry_duration", - full_name="google.cloud.tasks.v2.RetryConfig.max_retry_duration", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="min_backoff", - full_name="google.cloud.tasks.v2.RetryConfig.min_backoff", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_backoff", - full_name="google.cloud.tasks.v2.RetryConfig.max_backoff", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_doublings", - full_name="google.cloud.tasks.v2.RetryConfig.max_doublings", - index=4, - number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=904, - serialized_end=1113, -) - - -_STACKDRIVERLOGGINGCONFIG = _descriptor.Descriptor( - name="StackdriverLoggingConfig", - full_name="google.cloud.tasks.v2.StackdriverLoggingConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="sampling_ratio", - full_name="google.cloud.tasks.v2.StackdriverLoggingConfig.sampling_ratio", - index=0, - number=1, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1115, - serialized_end=1165, -) - -_QUEUE.fields_by_name[ - "app_engine_routing_override" -].message_type = ( - google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2._APPENGINEROUTING -) -_QUEUE.fields_by_name["rate_limits"].message_type = _RATELIMITS -_QUEUE.fields_by_name["retry_config"].message_type = _RETRYCONFIG -_QUEUE.fields_by_name["state"].enum_type = _QUEUE_STATE -_QUEUE.fields_by_name[ - "purge_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_QUEUE.fields_by_name[ - "stackdriver_logging_config" -].message_type = _STACKDRIVERLOGGINGCONFIG -_QUEUE_STATE.containing_type = _QUEUE -_RETRYCONFIG.fields_by_name[ - "max_retry_duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_RETRYCONFIG.fields_by_name[ - "min_backoff" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_RETRYCONFIG.fields_by_name[ - "max_backoff" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION 
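The field cross-links above are what bind purge_time to google.protobuf.Timestamp and the RetryConfig backoff fields to google.protobuf.Duration, so the generated classes accept those well-known types directly. A brief sketch (the queue name is a placeholder):

from google.protobuf import duration_pb2

from google.cloud.tasks_v2.proto import queue_pb2

retry = queue_pb2.RetryConfig(
    max_attempts=5,
    min_backoff=duration_pb2.Duration(seconds=10),
    max_backoff=duration_pb2.Duration(seconds=300),
    max_doublings=3,
)
queue = queue_pb2.Queue(
    name="projects/my-project/locations/us-central1/queues/my-queue",
    retry_config=retry,
)
print(queue.retry_config.min_backoff.seconds)  # 10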
-DESCRIPTOR.message_types_by_name["Queue"] = _QUEUE -DESCRIPTOR.message_types_by_name["RateLimits"] = _RATELIMITS -DESCRIPTOR.message_types_by_name["RetryConfig"] = _RETRYCONFIG -DESCRIPTOR.message_types_by_name["StackdriverLoggingConfig"] = _STACKDRIVERLOGGINGCONFIG -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Queue = _reflection.GeneratedProtocolMessageType( - "Queue", - (_message.Message,), - { - "DESCRIPTOR": _QUEUE, - "__module__": "google.cloud.tasks_v2.proto.queue_pb2", - "__doc__": """A queue is a container of related tasks. Queues are configured to - manage how those tasks are dispatched. Configurable properties include - rate limits, retry options, queue types, and others. - - Attributes: - name: - Caller-specified and required in - [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue], - after which it becomes output only. The queue name. The - queue name must have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), colons (:), or periods (.). For more - information, see `Identifying projects - `_ - ``LOCATION_ID`` - is the canonical ID for the queue’s location. The list of - available locations can be obtained by calling [ListLocatio - ns][google.cloud.location.Locations.ListLocations]. For - more information, see - https://cloud.google.com/about/locations/. - ``QUEUE_ID`` can - contain letters ([A-Za-z]), numbers ([0-9]), or hyphens - (-). The maximum length is 100 characters. - app_engine_routing_override: - Overrides for [task-level app_engine_routing][google.cloud.tas - ks.v2.AppEngineHttpRequest.app_engine_routing]. These settings - apply only to [App Engine - tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in this - queue. [Http tasks][google.cloud.tasks.v2.HttpRequest] are not - affected. If set, ``app_engine_routing_override`` is used for - all [App Engine - tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in the - queue, no matter what the setting is for the [task-level app_e - ngine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app\_ - engine_routing]. - rate_limits: - Rate limits for task dispatches. - [rate_limits][google.cloud.tasks.v2.Queue.rate_limits] and - [retry_config][google.cloud.tasks.v2.Queue.retry_config] are - related because they both control task attempts. However they - control task attempts in different ways: - - [rate_limits][google.cloud.tasks.v2.Queue.rate_limits] - controls the total rate of dispatches from a queue - (i.e. all traffic dispatched from the queue, regardless of - whether the dispatch is from a first attempt or a retry). - - [retry_config][google.cloud.tasks.v2.Queue.retry_config] - controls what happens to particular a task after its first - attempt fails. That is, - [retry_config][google.cloud.tasks.v2.Queue.retry_config] - controls task retries (the second attempt, third attempt, - etc). The queue’s actual dispatch rate is the result of: - - Number of tasks in the queue - User-specified throttling: - [rate_limits][google.cloud.tasks.v2.Queue.rate_limits], - [retry_config][google.cloud.tasks.v2.Queue.retry_config], and - the [queue’s state][google.cloud.tasks.v2.Queue.state]. - - System throttling due to ``429`` (Too Many Requests) or - ``503`` (Service Unavailable) responses from the worker, - high error rates, or to smooth sudden large traffic spikes. - retry_config: - Settings that determine the retry behavior. 
- For tasks - created using Cloud Tasks: the queue-level retry settings - apply to all tasks in the queue that were created using Cloud - Tasks. Retry settings cannot be set on individual tasks. - - For tasks created using the App Engine SDK: the queue-level - retry settings apply to all tasks in the queue which do not - have retry settings explicitly set on the task and were - created by the App Engine SDK. See `App Engine - documentation `_. - state: - Output only. The state of the queue. ``state`` can only be - changed by called - [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue], - [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue], - or uploading `queue.yaml/xml `_. - [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] - cannot be used to change ``state``. - purge_time: - Output only. The last time this queue was purged. All tasks - that were [created][google.cloud.tasks.v2.Task.create_time] - before this time were purged. A queue can be purged using - [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue], the - `App Engine Task Queue SDK, or the Cloud Console `_. - Purge time will be truncated to the nearest microsecond. Purge - time will be unset if the queue has never been purged. - stackdriver_logging_config: - Configuration options for writing logs to `Stackdriver Logging - `_. If this field is - unset, then no logs are written. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.Queue) - }, -) -_sym_db.RegisterMessage(Queue) - -RateLimits = _reflection.GeneratedProtocolMessageType( - "RateLimits", - (_message.Message,), - { - "DESCRIPTOR": _RATELIMITS, - "__module__": "google.cloud.tasks_v2.proto.queue_pb2", - "__doc__": """Rate limits. This message determines the maximum rate that tasks can - be dispatched by a queue, regardless of whether the dispatch is a - first task attempt or a retry. Note: The debugging command, - [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask], will run a task - even if the queue has reached its - [RateLimits][google.cloud.tasks.v2.RateLimits]. - - Attributes: - max_dispatches_per_second: - The maximum rate at which tasks are dispatched from this - queue. If unspecified when the queue is created, Cloud Tasks - will pick the default. - The maximum allowed value is 500. - This field has the same meaning as `rate in queue.yaml/xml `_. - max_burst_size: - Output only. The max burst size. Max burst size limits how - fast tasks in queue are processed when many tasks are in the - queue and the rate is high. This field allows the queue to - have a high rate so processing starts shortly after a task is - enqueued, but still limits resource usage when many tasks are - enqueued in a short period of time. The `token bucket - `_ algorithm is used - to control the rate of task dispatches. Each queue has a token - bucket that holds tokens, up to the maximum specified by - ``max_burst_size``. Each time a task is dispatched, a token is - removed from the bucket. Tasks will be dispatched until the - queue’s bucket runs out of tokens. The bucket will be - continuously refilled with new tokens based on [max_dispatches - _per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_p - er_second]. Cloud Tasks will pick the value of - ``max_burst_size`` based on the value of [max_dispatches_per_s - econd][google.cloud.tasks.v2.RateLimits.max_dispatches_per_sec - ond]. For queues that were created or updated using - ``queue.yaml/xml``, ``max_burst_size`` is equal to - `bucket_size `_. 
Since - ``max_burst_size`` is output only, if - [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] is - called on a queue created by ``queue.yaml/xml``, - ``max_burst_size`` will be reset based on the value of [max_di - spatches_per_second][google.cloud.tasks.v2.RateLimits.max_disp - atches_per_second], regardless of whether [max_dispatches_per\_ - second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_se - cond] is updated. - max_concurrent_dispatches: - The maximum number of concurrent tasks that Cloud Tasks allows - to be dispatched for this queue. After this threshold has been - reached, Cloud Tasks stops dispatching tasks until the number - of concurrent requests decreases. If unspecified when the - queue is created, Cloud Tasks will pick the default. The - maximum allowed value is 5,000. This field has the same - meaning as `max_concurrent_requests in queue.yaml/xml `_. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.RateLimits) - }, -) -_sym_db.RegisterMessage(RateLimits) - -RetryConfig = _reflection.GeneratedProtocolMessageType( - "RetryConfig", - (_message.Message,), - { - "DESCRIPTOR": _RETRYCONFIG, - "__module__": "google.cloud.tasks_v2.proto.queue_pb2", - "__doc__": """Retry config. These settings determine when a failed task attempt is - retried. - - Attributes: - max_attempts: - Number of attempts per task. Cloud Tasks will attempt the - task ``max_attempts`` times (that is, if the first attempt - fails, then there will be ``max_attempts - 1`` retries). Must - be >= -1. If unspecified when the queue is created, Cloud - Tasks will pick the default. -1 indicates unlimited attempts. - This field has the same meaning as `task_retry_limit in - queue.yaml/xml `_. - max_retry_duration: - If positive, ``max_retry_duration`` specifies the time limit - for retrying a failed task, measured from when the task was - first attempted. Once ``max_retry_duration`` time has passed - *and* the task has been attempted - [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] - times, no further attempts will be made and the task will be - deleted. If zero, then the task age is unlimited. If - unspecified when the queue is created, Cloud Tasks will pick - the default. ``max_retry_duration`` will be truncated to the - nearest second. This field has the same meaning as - `task_age_limit in queue.yaml/xml `__. - min_backoff: - A task will be - [scheduled][google.cloud.tasks.v2.Task.schedule_time] for - retry between - [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] - and - [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] - duration after it fails, if the queue’s - [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies - that the task should be retried. If unspecified when the - queue is created, Cloud Tasks will pick the default. - ``min_backoff`` will be truncated to the nearest second. This - field has the same meaning as `min_backoff_seconds in - queue.yaml/xml `_. - max_backoff: - A task will be - [scheduled][google.cloud.tasks.v2.Task.schedule_time] for - retry between - [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] - and - [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] - duration after it fails, if the queue’s - [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies - that the task should be retried. If unspecified when the - queue is created, Cloud Tasks will pick the default. - ``max_backoff`` will be truncated to the nearest second. 
This - field has the same meaning as `max_backoff_seconds in - queue.yaml/xml `_. - max_doublings: - The time between retries will double ``max_doublings`` times. - A task’s retry interval starts at - [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff], - then doubles ``max_doublings`` times, then increases linearly, - and finally retries retries at intervals of - [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] - up to - [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] - times. For example, if - [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] - is 10s, - [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] - is 300s, and ``max_doublings`` is 3, then the a task will - first be retried in 10s. The retry interval will double three - times, and then increase linearly by 2^3 \* 10s. Finally, the - task will retry at intervals of - [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] - until the task has been attempted - [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] - times. Thus, the requests will retry at 10s, 20s, 40s, 80s, - 160s, 240s, 300s, 300s, …. If unspecified when the queue is - created, Cloud Tasks will pick the default. This field has - the same meaning as `max_doublings in queue.yaml/xml `_. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.RetryConfig) - }, -) -_sym_db.RegisterMessage(RetryConfig) - -StackdriverLoggingConfig = _reflection.GeneratedProtocolMessageType( - "StackdriverLoggingConfig", - (_message.Message,), - { - "DESCRIPTOR": _STACKDRIVERLOGGINGCONFIG, - "__module__": "google.cloud.tasks_v2.proto.queue_pb2", - "__doc__": """Configuration options for writing logs to `Stackdriver Logging - `_. - - Attributes: - sampling_ratio: - Specifies the fraction of operations to write to `Stackdriver - Logging `_. This - field may contain any value between 0.0 and 1.0, inclusive. - 0.0 is the default and means that no operations are logged. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.StackdriverLoggingConfig) - }, -) -_sym_db.RegisterMessage(StackdriverLoggingConfig) - - -DESCRIPTOR._options = None -_QUEUE._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/tasks_v2/proto/queue_pb2_grpc.py b/google/cloud/tasks_v2/proto/queue_pb2_grpc.py deleted file mode 100644 index 8a939394..00000000 --- a/google/cloud/tasks_v2/proto/queue_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/tasks_v2/proto/target_pb2.py b/google/cloud/tasks_v2/proto/target_pb2.py deleted file mode 100644 index a890c3d6..00000000 --- a/google/cloud/tasks_v2/proto/target_pb2.py +++ /dev/null @@ -1,1147 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
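The max_doublings description above spells out a concrete schedule (10s, 20s, 40s, 80s, 160s, 240s, 300s, 300s, ... for min_backoff=10s, max_backoff=300s, max_doublings=3). A back-of-the-envelope model of that documented arithmetic, purely illustrative and not code from this library:

def backoff_schedule(min_backoff, max_backoff, max_doublings, attempts):
    """Model the documented retry intervals, in seconds."""
    intervals = []
    interval = min_backoff
    doublings = 0
    for _ in range(attempts):
        intervals.append(min(interval, max_backoff))
        if doublings < max_doublings:
            interval *= 2  # doubling phase
            doublings += 1
        else:
            interval += min_backoff * 2 ** max_doublings  # then linear growth
    return intervals

print(backoff_schedule(10, 300, 3, 8))  # [10, 20, 40, 80, 160, 240, 300, 300]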
-# source: google/cloud/tasks_v2/proto/target.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/tasks_v2/proto/target.proto", - package="google.cloud.tasks.v2", - syntax="proto3", - serialized_options=b"\n\031com.google.cloud.tasks.v2B\013TargetProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n(google/cloud/tasks_v2/proto/target.proto\x12\x15google.cloud.tasks.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"\xe1\x02\n\x0bHttpRequest\x12\x10\n\x03url\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x36\n\x0bhttp_method\x18\x02 \x01(\x0e\x32!.google.cloud.tasks.v2.HttpMethod\x12@\n\x07headers\x18\x03 \x03(\x0b\x32/.google.cloud.tasks.v2.HttpRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12\x38\n\x0boauth_token\x18\x05 \x01(\x0b\x32!.google.cloud.tasks.v2.OAuthTokenH\x00\x12\x36\n\noidc_token\x18\x06 \x01(\x0b\x32 .google.cloud.tasks.v2.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"\xb2\x02\n\x14\x41ppEngineHttpRequest\x12\x36\n\x0bhttp_method\x18\x01 \x01(\x0e\x32!.google.cloud.tasks.v2.HttpMethod\x12\x43\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32\'.google.cloud.tasks.v2.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12I\n\x07headers\x18\x04 \x03(\x0b\x32\x38.google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42\x66\n\x19\x63om.google.cloud.tasks.v2B\x0bTargetProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3', - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - -_HTTPMETHOD = _descriptor.EnumDescriptor( - name="HttpMethod", - full_name="google.cloud.tasks.v2.HttpMethod", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="HTTP_METHOD_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="POST", - 
index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GET", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="HEAD", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PUT", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DELETE", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PATCH", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="OPTIONS", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1003, - serialized_end=1118, -) -_sym_db.RegisterEnumDescriptor(_HTTPMETHOD) - -HttpMethod = enum_type_wrapper.EnumTypeWrapper(_HTTPMETHOD) -HTTP_METHOD_UNSPECIFIED = 0 -POST = 1 -GET = 2 -HEAD = 3 -PUT = 4 -DELETE = 5 -PATCH = 6 -OPTIONS = 7 - - -_HTTPREQUEST_HEADERSENTRY = _descriptor.Descriptor( - name="HeadersEntry", - full_name="google.cloud.tasks.v2.HttpRequest.HeadersEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.tasks.v2.HttpRequest.HeadersEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.tasks.v2.HttpRequest.HeadersEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=414, - serialized_end=460, -) - -_HTTPREQUEST = _descriptor.Descriptor( - name="HttpRequest", - full_name="google.cloud.tasks.v2.HttpRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="url", - full_name="google.cloud.tasks.v2.HttpRequest.url", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="http_method", - 
full_name="google.cloud.tasks.v2.HttpRequest.http_method", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="headers", - full_name="google.cloud.tasks.v2.HttpRequest.headers", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="body", - full_name="google.cloud.tasks.v2.HttpRequest.body", - index=3, - number=4, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="oauth_token", - full_name="google.cloud.tasks.v2.HttpRequest.oauth_token", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="oidc_token", - full_name="google.cloud.tasks.v2.HttpRequest.oidc_token", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_HTTPREQUEST_HEADERSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="authorization_header", - full_name="google.cloud.tasks.v2.HttpRequest.authorization_header", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=131, - serialized_end=484, -) - - -_APPENGINEHTTPREQUEST_HEADERSENTRY = _descriptor.Descriptor( - name="HeadersEntry", - full_name="google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=414, - serialized_end=460, -) - -_APPENGINEHTTPREQUEST = _descriptor.Descriptor( - name="AppEngineHttpRequest", - full_name="google.cloud.tasks.v2.AppEngineHttpRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="http_method", - full_name="google.cloud.tasks.v2.AppEngineHttpRequest.http_method", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="app_engine_routing", - full_name="google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="relative_uri", - full_name="google.cloud.tasks.v2.AppEngineHttpRequest.relative_uri", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="headers", - full_name="google.cloud.tasks.v2.AppEngineHttpRequest.headers", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="body", - full_name="google.cloud.tasks.v2.AppEngineHttpRequest.body", - index=4, - number=5, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_APPENGINEHTTPREQUEST_HEADERSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=487, - serialized_end=793, -) - - -_APPENGINEROUTING = _descriptor.Descriptor( - name="AppEngineRouting", - full_name="google.cloud.tasks.v2.AppEngineRouting", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="service", - full_name="google.cloud.tasks.v2.AppEngineRouting.service", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.cloud.tasks.v2.AppEngineRouting.version", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="instance", - full_name="google.cloud.tasks.v2.AppEngineRouting.instance", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="host", - full_name="google.cloud.tasks.v2.AppEngineRouting.host", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=795, - serialized_end=879, -) - - -_OAUTHTOKEN = _descriptor.Descriptor( - name="OAuthToken", - full_name="google.cloud.tasks.v2.OAuthToken", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="service_account_email", - full_name="google.cloud.tasks.v2.OAuthToken.service_account_email", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="scope", - full_name="google.cloud.tasks.v2.OAuthToken.scope", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=881, - serialized_end=939, -) - - -_OIDCTOKEN = _descriptor.Descriptor( - name="OidcToken", - full_name="google.cloud.tasks.v2.OidcToken", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="service_account_email", - full_name="google.cloud.tasks.v2.OidcToken.service_account_email", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="audience", - full_name="google.cloud.tasks.v2.OidcToken.audience", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=941, - serialized_end=1001, -) - -_HTTPREQUEST_HEADERSENTRY.containing_type = _HTTPREQUEST -_HTTPREQUEST.fields_by_name["http_method"].enum_type = _HTTPMETHOD -_HTTPREQUEST.fields_by_name["headers"].message_type = _HTTPREQUEST_HEADERSENTRY -_HTTPREQUEST.fields_by_name["oauth_token"].message_type = _OAUTHTOKEN -_HTTPREQUEST.fields_by_name["oidc_token"].message_type = _OIDCTOKEN -_HTTPREQUEST.oneofs_by_name["authorization_header"].fields.append( - _HTTPREQUEST.fields_by_name["oauth_token"] -) -_HTTPREQUEST.fields_by_name[ - "oauth_token" -].containing_oneof = _HTTPREQUEST.oneofs_by_name["authorization_header"] -_HTTPREQUEST.oneofs_by_name["authorization_header"].fields.append( - _HTTPREQUEST.fields_by_name["oidc_token"] -) -_HTTPREQUEST.fields_by_name[ - "oidc_token" -].containing_oneof = _HTTPREQUEST.oneofs_by_name["authorization_header"] -_APPENGINEHTTPREQUEST_HEADERSENTRY.containing_type = _APPENGINEHTTPREQUEST -_APPENGINEHTTPREQUEST.fields_by_name["http_method"].enum_type = _HTTPMETHOD -_APPENGINEHTTPREQUEST.fields_by_name[ - "app_engine_routing" -].message_type = _APPENGINEROUTING -_APPENGINEHTTPREQUEST.fields_by_name[ - "headers" -].message_type = _APPENGINEHTTPREQUEST_HEADERSENTRY -DESCRIPTOR.message_types_by_name["HttpRequest"] = _HTTPREQUEST -DESCRIPTOR.message_types_by_name["AppEngineHttpRequest"] = _APPENGINEHTTPREQUEST -DESCRIPTOR.message_types_by_name["AppEngineRouting"] = _APPENGINEROUTING -DESCRIPTOR.message_types_by_name["OAuthToken"] = _OAUTHTOKEN -DESCRIPTOR.message_types_by_name["OidcToken"] = _OIDCTOKEN -DESCRIPTOR.enum_types_by_name["HttpMethod"] = _HTTPMETHOD -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -HttpRequest = _reflection.GeneratedProtocolMessageType( - "HttpRequest", - (_message.Message,), - { - "HeadersEntry": _reflection.GeneratedProtocolMessageType( - "HeadersEntry", - (_message.Message,), - { - "DESCRIPTOR": _HTTPREQUEST_HEADERSENTRY, - "__module__": "google.cloud.tasks_v2.proto.target_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.HttpRequest.HeadersEntry) - }, - ), - "DESCRIPTOR": _HTTPREQUEST, - "__module__": "google.cloud.tasks_v2.proto.target_pb2", - "__doc__": """HTTP request. The task will be pushed to the worker as an HTTP - request. If the worker or the redirected worker acknowledges the task - by returning a successful HTTP response code ([``200`` - ``299``]), - the task will be removed from the queue. 
If any other HTTP response - code is returned or no response is received, the task will be retried - according to the following: - User-specified throttling: [retry - configuration][google.cloud.tasks.v2.Queue.retry_config], [rate - limits][google.cloud.tasks.v2.Queue.rate_limits], and the [queue’s - state][google.cloud.tasks.v2.Queue.state]. - System throttling: To - prevent the worker from overloading, Cloud Tasks may temporarily - reduce the queue’s effective rate. User-specified settings will not - be changed. System throttling happens because: - Cloud Tasks backs - off on all errors. Normally the backoff specified in [rate - limits][google.cloud.tasks.v2.Queue.rate_limits] will be used. But - if the worker returns ``429`` (Too Many Requests), ``503`` (Service - Unavailable), or the rate of errors is high, Cloud Tasks will use a - higher backoff rate. The retry specified in the ``Retry-After`` - HTTP response header is considered. - To prevent traffic spikes and - to smooth sudden increases in traffic, dispatches ramp up slowly - when the queue is newly created or idle and if large numbers of - tasks suddenly become available to dispatch (due to spikes in - create task rates, the queue being unpaused, or many tasks that are - scheduled at the same time). - - Attributes: - url: - Required. The full url path that the request will be sent to. - This string must begin with either “http://” or “https://”. - Some examples are: ``http://acme.com`` and - ``https://acme.com/sales:8080``. Cloud Tasks will encode some - characters for safety and compatibility. The maximum allowed - URL length is 2083 characters after encoding. The - ``Location`` header response from a redirect response [``300`` - - ``399``] may be followed. The redirect is not counted as a - separate attempt. - http_method: - The HTTP method to use for the request. The default is POST. - headers: - HTTP request headers. This map contains the header field - names and values. Headers can be set when the [task is - created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - These headers represent a subset of the headers that will - accompany the task’s HTTP request. Some HTTP request headers - will be ignored or replaced. A partial list of headers that - will be ignored or replaced is: - Host: This will be - computed by Cloud Tasks and derived from - [HttpRequest.url][google.cloud.tasks.v2.HttpRequest.url]. - - Content-Length: This will be computed by Cloud Tasks. - User- - Agent: This will be set to ``"Google-Cloud-Tasks"``. - - X-Google-*: Google use only. - X-AppEngine-*: Google use - only. ``Content-Type`` won’t be set by Cloud Tasks. You can - explicitly set ``Content-Type`` to a media type when the [task - is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - For example, ``Content-Type`` can be set to - ``"application/octet-stream"`` or ``"application/json"``. - Headers which can have multiple values (according to RFC2616) - can be specified using comma-separated values. The size of - the headers must be less than 80KB. - body: - HTTP request body. A request body is allowed only if the - [HTTP method][google.cloud.tasks.v2.HttpRequest.http_method] - is POST, PUT, or PATCH. It is an error to set body on a task - with an incompatible - [HttpMethod][google.cloud.tasks.v2.HttpMethod]. - authorization_header: - The mode for generating an ``Authorization`` header for HTTP - requests. 
If specified, all ``Authorization`` headers in the - [HttpRequest.headers][google.cloud.tasks.v2.HttpRequest.header - s] field will be overridden. - oauth_token: - If specified, an `OAuth token - `\_ - will be generated and attached as an ``Authorization`` header - in the HTTP request. This type of authorization should - generally only be used when calling Google APIs hosted on - \*.googleapis.com. - oidc_token: - If specified, an `OIDC `_ token will be generated and - attached as an ``Authorization`` header in the HTTP request. - This type of authorization can be used for many scenarios, - including calling Cloud Run, or endpoints where you intend to - validate the token yourself. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.HttpRequest) - }, -) -_sym_db.RegisterMessage(HttpRequest) -_sym_db.RegisterMessage(HttpRequest.HeadersEntry) - -AppEngineHttpRequest = _reflection.GeneratedProtocolMessageType( - "AppEngineHttpRequest", - (_message.Message,), - { - "HeadersEntry": _reflection.GeneratedProtocolMessageType( - "HeadersEntry", - (_message.Message,), - { - "DESCRIPTOR": _APPENGINEHTTPREQUEST_HEADERSENTRY, - "__module__": "google.cloud.tasks_v2.proto.target_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry) - }, - ), - "DESCRIPTOR": _APPENGINEHTTPREQUEST, - "__module__": "google.cloud.tasks_v2.proto.target_pb2", - "__doc__": """App Engine HTTP request. The message defines the HTTP request that is - sent to an App Engine app when the task is dispatched. Using - [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest] - requires ```appengine.applications.get`` - `\_ - Google IAM permission for the project and the following scope: - ``https://www.googleapis.com/auth/cloud-platform`` The task will be - delivered to the App Engine app which belongs to the same project as - the queue. For more information, see `How Requests are Routed - `_ and how routing is affected by `dispatch files `_. - Traffic is encrypted during transport and never leaves Google - datacenters. Because this traffic is carried over a communication - mechanism internal to Google, you cannot explicitly set the protocol - (for example, HTTP or HTTPS). The request to the handler, however, - will appear to have used the HTTP protocol. The - [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting] used to - construct the URL that the task is delivered to can be set at the - queue-level or task-level: - If [app_engine_routing_override is set - on the queue][Queue.app_engine_routing_override], this value is - used for all tasks in the queue, no matter what the setting is for - the [task-level - app_engine_routing][AppEngineHttpRequest.app_engine_routing]. The - ``url`` that the task will be sent to is: - ``url =`` - [host][google.cloud.tasks.v2.AppEngineRouting.host] ``+`` [relative - _uri][google.cloud.tasks.v2.AppEngineHttpRequest.relative_uri] Tasks - can be dispatched to secure app handlers, unsecure app handlers, and - URIs restricted with ```login: admin`` `_. Because tasks are not run - as any user, they cannot be dispatched to URIs restricted with - ```login: required`` `_ Task dispatches also do not follow - redirects. The task attempt has succeeded if the app’s request - handler returns an HTTP response code in the range [``200`` - - ``299``]. The task attempt has failed if the app’s handler returns a - non-2xx response code or Cloud Tasks does not receive response before - the [deadline][google.cloud.tasks.v2.Task.dispatch_deadline]. 
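[editor's note] The hand-written `target_pb2` module being deleted here is replaced by proto-plus types exposed from `google.cloud.tasks_v2`. As a minimal sketch of how the `HttpRequest` fields documented above (url, http_method, headers, body, and the `authorization_header` oneof) are used against the new surface — project, location, queue, URL, and service-account values below are placeholders:

```python
from google.cloud import tasks_v2

# Placeholder project, location, queue, URL, and service-account values.
client = tasks_v2.CloudTasksClient()
parent = client.queue_path("my-project", "us-central1", "my-queue")

task = tasks_v2.Task(
    http_request=tasks_v2.HttpRequest(
        url="https://example.com/task-handler",
        http_method=tasks_v2.HttpMethod.POST,
        headers={"Content-Type": "application/json"},
        body=b'{"payload": "hello"}',
        # Setting oidc_token selects the OIDC branch of the
        # authorization_header oneof described above.
        oidc_token=tasks_v2.OidcToken(
            service_account_email="worker@my-project.iam.gserviceaccount.com",
        ),
    ),
)
response = client.create_task(parent=parent, task=task)
print(response.name)
```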
Failed - tasks will be retried according to the [retry - configuration][google.cloud.tasks.v2.Queue.retry_config]. ``503`` - (Service Unavailable) is considered an App Engine system error instead - of an application error and will cause Cloud Tasks’ traffic congestion - control to temporarily throttle the queue’s dispatches. Unlike other - types of task targets, a ``429`` (Too Many Requests) response from an - app handler does not cause traffic congestion control to throttle the - queue. - - Attributes: - http_method: - The HTTP method to use for the request. The default is POST. - The app’s request handler for the task’s target URL must be - able to handle HTTP requests with this http_method, otherwise - the task attempt will fail with error code 405 (Method Not - Allowed). See `Writing a push task request handler `_ and the - documentation for the request handlers in the language your - app is written in e.g. `Python Request Handler `_. - app_engine_routing: - Task-level setting for App Engine routing. - If - [app_engine_routing_override is set on the - queue][Queue.app_engine_routing_override], this value is used - for all tasks in the queue, no matter what the setting is - for the [task-level - app_engine_routing][AppEngineHttpRequest.app_engine_routing]. - relative_uri: - The relative URI. The relative URI must begin with “/” and - must be a valid HTTP relative URI. It can contain a path and - query string arguments. If the relative URI is empty, then the - root path “/” will be used. No spaces are allowed, and the - maximum length allowed is 2083 characters. - headers: - HTTP request headers. This map contains the header field - names and values. Headers can be set when the [task is - created][google.cloud.tasks.v2.CloudTasks.CreateTask]. - Repeated headers are not supported but a header value can - contain commas. Cloud Tasks sets some headers to default - values: - ``User-Agent``: By default, this header is - ``"AppEngine-Google; (+http://code.google.com/appengine)"``. - This header can be modified, but Cloud Tasks will append - ``"AppEngine-Google; (+http://code.google.com/appengine)"`` to - the modified ``User-Agent``. If the task has a - [body][google.cloud.tasks.v2.AppEngineHttpRequest.body], Cloud - Tasks sets the following headers: - ``Content-Type``: By - default, the ``Content-Type`` header is set to - ``"application/octet-stream"``. The default can be overridden - by explicitly setting ``Content-Type`` to a particular - media type when the [task is - created][google.cloud.tasks.v2.CloudTasks.CreateTask]. For - example, ``Content-Type`` can be set to - ``"application/json"``. - ``Content-Length``: This is - computed by Cloud Tasks. This value is output only. It - cannot be changed. The headers below cannot be set or - overridden: - ``Host`` - ``X-Google-*`` - - ``X-AppEngine-*`` In addition, Cloud Tasks sets some headers - when the task is dispatched, such as headers containing - information about the task; see `request headers - `_. These headers are set - only when the task is dispatched, so they are not visible when - the task is returned in a Cloud Tasks response. Although - there is no specific limit for the maximum number of headers - or the size, there is a limit on the maximum size of the - [Task][google.cloud.tasks.v2.Task]. For more information, see - the [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] - documentation. - body: - HTTP request body. A request body is allowed only if the HTTP - method is POST or PUT. 
It is an error to set a body on a task - with an incompatible - [HttpMethod][google.cloud.tasks.v2.HttpMethod]. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.AppEngineHttpRequest) - }, -) -_sym_db.RegisterMessage(AppEngineHttpRequest) -_sym_db.RegisterMessage(AppEngineHttpRequest.HeadersEntry) - -AppEngineRouting = _reflection.GeneratedProtocolMessageType( - "AppEngineRouting", - (_message.Message,), - { - "DESCRIPTOR": _APPENGINEROUTING, - "__module__": "google.cloud.tasks_v2.proto.target_pb2", - "__doc__": """App Engine Routing. Defines routing characteristics specific to App - Engine - service, version, and instance. For more information about - services, versions, and instances see `An Overview of App Engine - `_, `Microservices Architecture on Google App Engine - `_, `App Engine Standard request routing - `_, and `App Engine Flex request routing - `_. Using - [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting] requires - ```appengine.applications.get`` - `\_ - Google IAM permission for the project and the following scope: - ``https://www.googleapis.com/auth/cloud-platform`` - - Attributes: - service: - App service. By default, the task is sent to the service - which is the default service when the task is attempted. For - some queues or tasks which were created using the App Engine - Task Queue API, - [host][google.cloud.tasks.v2.AppEngineRouting.host] is not - parsable into - [service][google.cloud.tasks.v2.AppEngineRouting.service], - [version][google.cloud.tasks.v2.AppEngineRouting.version], and - [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. - For example, some tasks which were created using the App - Engine SDK use a custom domain name; custom domains are not - parsed by Cloud Tasks. If - [host][google.cloud.tasks.v2.AppEngineRouting.host] is not - parsable, then - [service][google.cloud.tasks.v2.AppEngineRouting.service], - [version][google.cloud.tasks.v2.AppEngineRouting.version], and - [instance][google.cloud.tasks.v2.AppEngineRouting.instance] - are the empty string. - version: - App version. By default, the task is sent to the version - which is the default version when the task is attempted. For - some queues or tasks which were created using the App Engine - Task Queue API, - [host][google.cloud.tasks.v2.AppEngineRouting.host] is not - parsable into - [service][google.cloud.tasks.v2.AppEngineRouting.service], - [version][google.cloud.tasks.v2.AppEngineRouting.version], and - [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. - For example, some tasks which were created using the App - Engine SDK use a custom domain name; custom domains are not - parsed by Cloud Tasks. If - [host][google.cloud.tasks.v2.AppEngineRouting.host] is not - parsable, then - [service][google.cloud.tasks.v2.AppEngineRouting.service], - [version][google.cloud.tasks.v2.AppEngineRouting.version], and - [instance][google.cloud.tasks.v2.AppEngineRouting.instance] - are the empty string. - instance: - App instance. By default, the task is sent to an instance - which is available when the task is attempted. Requests can - only be sent to a specific instance if `manual scaling is used - in App Engine Standard - `_. - App Engine Flex does not support instances. For more - information, see `App Engine Standard request routing - `_ and `App Engine Flex request routing - `_. - host: - Output only. The host that the task is sent to. 
The host is - constructed from the domain name of the app associated with - the queue’s project ID (for example .appspot.com), and the - [service][google.cloud.tasks.v2.AppEngineRouting.service], - [version][google.cloud.tasks.v2.AppEngineRouting.version], and - [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. - Tasks which were created using the App Engine SDK might have a - custom domain name. For more information, see `How Requests - are Routed - `_. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.AppEngineRouting) - }, -) -_sym_db.RegisterMessage(AppEngineRouting) - -OAuthToken = _reflection.GeneratedProtocolMessageType( - "OAuthToken", - (_message.Message,), - { - "DESCRIPTOR": _OAUTHTOKEN, - "__module__": "google.cloud.tasks_v2.proto.target_pb2", - "__doc__": """Contains information needed for generating an `OAuth token - `_. This - type of authorization should generally only be used when calling - Google APIs hosted on \*.googleapis.com. - - Attributes: - service_account_email: - \ `Service account email - `_ to be - used for generating OAuth token. The service account must be - within the same project as the queue. The caller must have - iam.serviceAccounts.actAs permission for the service account. - scope: - OAuth scope to be used for generating OAuth access token. If - not specified, “https://www.googleapis.com/auth/cloud- - platform” will be used. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.OAuthToken) - }, -) -_sym_db.RegisterMessage(OAuthToken) - -OidcToken = _reflection.GeneratedProtocolMessageType( - "OidcToken", - (_message.Message,), - { - "DESCRIPTOR": _OIDCTOKEN, - "__module__": "google.cloud.tasks_v2.proto.target_pb2", - "__doc__": """Contains information needed for generating an `OpenID Connect token - `_. - This type of authorization can be used for many scenarios, including - calling Cloud Run, or endpoints where you intend to validate the token - yourself. - - Attributes: - service_account_email: - \ `Service account email - `_ to be - used for generating OIDC token. The service account must be - within the same project as the queue. The caller must have - iam.serviceAccounts.actAs permission for the service account. - audience: - Audience to be used when generating OIDC token. If not - specified, the URI specified in target will be used. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.OidcToken) - }, -) -_sym_db.RegisterMessage(OidcToken) - - -DESCRIPTOR._options = None -_HTTPREQUEST_HEADERSENTRY._options = None -_HTTPREQUEST.fields_by_name["url"]._options = None -_APPENGINEHTTPREQUEST_HEADERSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/tasks_v2/proto/target_pb2_grpc.py b/google/cloud/tasks_v2/proto/target_pb2_grpc.py deleted file mode 100644 index 8a939394..00000000 --- a/google/cloud/tasks_v2/proto/target_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/tasks_v2/proto/task_pb2.py b/google/cloud/tasks_v2/proto/task_pb2.py deleted file mode 100644 index fcf78524..00000000 --- a/google/cloud/tasks_v2/proto/task_pb2.py +++ /dev/null @@ -1,604 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
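[editor's note] The App Engine target messages documented above (`AppEngineHttpRequest`, `AppEngineRouting`, plus `OAuthToken`/`OidcToken`) likewise move to `google.cloud.tasks_v2` types in the microgen surface. A rough, hedged equivalent of creating an App Engine task, with placeholder project, location, queue, and service names:

```python
from google.cloud import tasks_v2

# Placeholder project, location, queue, and App Engine service names.
client = tasks_v2.CloudTasksClient()
parent = client.queue_path("my-project", "us-central1", "my-queue")

task = tasks_v2.Task(
    app_engine_http_request=tasks_v2.AppEngineHttpRequest(
        http_method=tasks_v2.HttpMethod.POST,
        relative_uri="/tasks/process",
        # Route the task to a specific App Engine service; version, instance,
        # and host fall back to the defaults described above.
        app_engine_routing=tasks_v2.AppEngineRouting(service="worker"),
        body=b'{"job_id": 42}',
    ),
)
created = client.create_task(parent=parent, task=task)
print(created.name)
```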
-# source: google/cloud/tasks_v2/proto/task.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.tasks_v2.proto import ( - target_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/tasks_v2/proto/task.proto", - package="google.cloud.tasks.v2", - syntax="proto3", - serialized_options=b"\n\031com.google.cloud.tasks.v2B\tTaskProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n&google/cloud/tasks_v2/proto/task.proto\x12\x15google.cloud.tasks.v2\x1a\x19google/api/resource.proto\x1a(google/cloud/tasks_v2/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xb4\x05\n\x04Task\x12\x0c\n\x04name\x18\x01 \x01(\t\x12N\n\x17\x61pp_engine_http_request\x18\x02 \x01(\x0b\x32+.google.cloud.tasks.v2.AppEngineHttpRequestH\x00\x12:\n\x0chttp_request\x18\x03 \x01(\x0b\x32".google.cloud.tasks.v2.HttpRequestH\x00\x12\x31\n\rschedule_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x11\x64ispatch_deadline\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x16\n\x0e\x64ispatch_count\x18\x07 \x01(\x05\x12\x16\n\x0eresponse_count\x18\x08 \x01(\x05\x12\x35\n\rfirst_attempt\x18\t \x01(\x0b\x32\x1e.google.cloud.tasks.v2.Attempt\x12\x34\n\x0clast_attempt\x18\n \x01(\x0b\x32\x1e.google.cloud.tasks.v2.Attempt\x12.\n\x04view\x18\x0b \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"1\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x08\n\x04\x46ULL\x10\x02:h\xea\x41\x65\n\x1e\x63loudtasks.googleapis.com/Task\x12\x43projects/{project}/locations/{location}/queues/{queue}/tasks/{task}B\x0e\n\x0cmessage_type"\xcf\x01\n\x07\x41ttempt\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rdispatch_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rresponse_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x0fresponse_status\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusBd\n\x19\x63om.google.cloud.tasks.v2B\tTaskProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3', - dependencies=[ - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_TASK_VIEW = _descriptor.EnumDescriptor( - name="View", - full_name="google.cloud.tasks.v2.Task.View", - filename=None, - file=DESCRIPTOR, 
- create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="VIEW_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="BASIC", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FULL", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=776, - serialized_end=825, -) -_sym_db.RegisterEnumDescriptor(_TASK_VIEW) - - -_TASK = _descriptor.Descriptor( - name="Task", - full_name="google.cloud.tasks.v2.Task", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2.Task.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="app_engine_http_request", - full_name="google.cloud.tasks.v2.Task.app_engine_http_request", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="http_request", - full_name="google.cloud.tasks.v2.Task.http_request", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="schedule_time", - full_name="google.cloud.tasks.v2.Task.schedule_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.cloud.tasks.v2.Task.create_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dispatch_deadline", - full_name="google.cloud.tasks.v2.Task.dispatch_deadline", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - 
name="dispatch_count", - full_name="google.cloud.tasks.v2.Task.dispatch_count", - index=6, - number=7, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_count", - full_name="google.cloud.tasks.v2.Task.response_count", - index=7, - number=8, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="first_attempt", - full_name="google.cloud.tasks.v2.Task.first_attempt", - index=8, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="last_attempt", - full_name="google.cloud.tasks.v2.Task.last_attempt", - index=9, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="view", - full_name="google.cloud.tasks.v2.Task.view", - index=10, - number=11, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_TASK_VIEW,], - serialized_options=b"\352Ae\n\036cloudtasks.googleapis.com/Task\022Cprojects/{project}/locations/{location}/queues/{queue}/tasks/{task}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="message_type", - full_name="google.cloud.tasks.v2.Task.message_type", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=255, - serialized_end=947, -) - - -_ATTEMPT = _descriptor.Descriptor( - name="Attempt", - full_name="google.cloud.tasks.v2.Attempt", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="schedule_time", - full_name="google.cloud.tasks.v2.Attempt.schedule_time", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dispatch_time", - full_name="google.cloud.tasks.v2.Attempt.dispatch_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_time", - full_name="google.cloud.tasks.v2.Attempt.response_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_status", - full_name="google.cloud.tasks.v2.Attempt.response_status", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=950, - serialized_end=1157, -) - -_TASK.fields_by_name[ - "app_engine_http_request" -].message_type = ( - google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2._APPENGINEHTTPREQUEST -) -_TASK.fields_by_name[ - "http_request" -].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2._HTTPREQUEST -_TASK.fields_by_name[ - "schedule_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TASK.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TASK.fields_by_name[ - "dispatch_deadline" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_TASK.fields_by_name["first_attempt"].message_type = _ATTEMPT -_TASK.fields_by_name["last_attempt"].message_type = _ATTEMPT -_TASK.fields_by_name["view"].enum_type = _TASK_VIEW -_TASK_VIEW.containing_type = _TASK -_TASK.oneofs_by_name["message_type"].fields.append( - _TASK.fields_by_name["app_engine_http_request"] -) -_TASK.fields_by_name["app_engine_http_request"].containing_oneof = _TASK.oneofs_by_name[ - "message_type" -] -_TASK.oneofs_by_name["message_type"].fields.append(_TASK.fields_by_name["http_request"]) -_TASK.fields_by_name["http_request"].containing_oneof = _TASK.oneofs_by_name[ - "message_type" -] -_ATTEMPT.fields_by_name[ - "schedule_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ATTEMPT.fields_by_name[ - "dispatch_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ATTEMPT.fields_by_name[ - "response_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ATTEMPT.fields_by_name[ - "response_status" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -DESCRIPTOR.message_types_by_name["Task"] = _TASK -DESCRIPTOR.message_types_by_name["Attempt"] = _ATTEMPT -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Task = _reflection.GeneratedProtocolMessageType( - "Task", - (_message.Message,), - { - "DESCRIPTOR": _TASK, - "__module__": "google.cloud.tasks_v2.proto.task_pb2", - "__doc__": """A unit of scheduled work. - - Attributes: - name: - Optionally caller-specified in - [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. - The task name. 
The task name must have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/ta - sks/TASK_ID`` - ``PROJECT_ID`` can contain letters - ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), or - periods (.). For more information, see `Identifying - projects `_ - ``LOCATION_ID`` is the - canonical ID for the task’s location. The list of available - locations can be obtained by calling [ListLocations][google - .cloud.location.Locations.ListLocations]. For more - information, see https://cloud.google.com/about/locations/. - - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers ([0-9]), - or hyphens (-). The maximum length is 100 characters. - - ``TASK_ID`` can contain only letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), or underscores (_). The maximum - length is 500 characters. - message_type: - Required. The message to send to the worker. - app_engine_http_request: - HTTP request that is sent to the App Engine app handler. An - App Engine task is a task that has [AppEngineHttpRequest][goog - le.cloud.tasks.v2.AppEngineHttpRequest] set. - http_request: - HTTP request that is sent to the worker. An HTTP task is a - task that has [HttpRequest][google.cloud.tasks.v2.HttpRequest] - set. - schedule_time: - The time when the task is scheduled to be attempted or - retried. ``schedule_time`` will be truncated to the nearest - microsecond. - create_time: - Output only. The time that the task was created. - ``create_time`` will be truncated to the nearest second. - dispatch_deadline: - The deadline for requests sent to the worker. If the worker - does not respond by this deadline then the request is - cancelled and the attempt is marked as a ``DEADLINE_EXCEEDED`` - failure. Cloud Tasks will retry the task according to the - [RetryConfig][google.cloud.tasks.v2.RetryConfig]. Note that - when the request is cancelled, Cloud Tasks will stop listing - for the response, but whether the worker stops processing - depends on the worker. For example, if the worker is stuck, it - may not react to cancelled requests. The default and maximum - values depend on the type of request: - For [HTTP - tasks][google.cloud.tasks.v2.HttpRequest], the default is - 10 minutes. The deadline must be in the interval [15 seconds, - 30 minutes]. - For [App Engine - tasks][google.cloud.tasks.v2.AppEngineHttpRequest], 0 - indicates that the request has the default deadline. The - default deadline depends on the `scaling type - `_ of the service: - 10 minutes for standard apps with automatic scaling, 24 - hours for standard apps with manual and basic scaling, and 60 - minutes for flex apps. If the request deadline is set, it must - be in the interval [15 seconds, 24 hours 15 seconds]. - Regardless of the task’s ``dispatch_deadline``, the app - handler will not run for longer than than the service’s - timeout. We recommend setting the ``dispatch_deadline`` to - at most a few seconds more than the app handler’s timeout. - For more information see `Timeouts - `_. ``dispatch_deadline`` will be - truncated to the nearest millisecond. The deadline is an - approximate deadline. - dispatch_count: - Output only. The number of attempts dispatched. This count - includes attempts which have been dispatched but haven’t - received a response. - response_count: - Output only. The number of attempts which have received a - response. - first_attempt: - Output only. The status of the task’s first attempt. Only - [dispatch_time][google.cloud.tasks.v2.Attempt.dispatch_time] - will be set. 
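[editor's note] For the `schedule_time` and `dispatch_deadline` semantics described above, the proto-plus `Task` accepts the usual well-known protobuf types. A sketch under placeholder IDs, with a 30-second deadline chosen to sit inside the documented [15 seconds, 30 minutes] interval for HTTP tasks:

```python
import datetime

from google.cloud import tasks_v2
from google.protobuf import duration_pb2, timestamp_pb2

# Placeholder project, location, queue, and handler URL.
client = tasks_v2.CloudTasksClient()
parent = client.queue_path("my-project", "us-central1", "my-queue")

# Run the task roughly five minutes from now...
schedule_time = timestamp_pb2.Timestamp()
schedule_time.FromDatetime(datetime.datetime.utcnow() + datetime.timedelta(minutes=5))
# ...and give the worker 30 seconds per attempt (inside [15s, 30min] for HTTP tasks).
dispatch_deadline = duration_pb2.Duration(seconds=30)

task = tasks_v2.Task(
    http_request=tasks_v2.HttpRequest(url="https://example.com/task-handler"),
    schedule_time=schedule_time,
    dispatch_deadline=dispatch_deadline,
)
response = client.create_task(parent=parent, task=task)
```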
The other - [Attempt][google.cloud.tasks.v2.Attempt] information is not - retained by Cloud Tasks. - last_attempt: - Output only. The status of the task’s last attempt. - view: - Output only. The view specifies which subset of the - [Task][google.cloud.tasks.v2.Task] has been returned. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.Task) - }, -) -_sym_db.RegisterMessage(Task) - -Attempt = _reflection.GeneratedProtocolMessageType( - "Attempt", - (_message.Message,), - { - "DESCRIPTOR": _ATTEMPT, - "__module__": "google.cloud.tasks_v2.proto.task_pb2", - "__doc__": """The status of a task attempt. - - Attributes: - schedule_time: - Output only. The time that this attempt was scheduled. - ``schedule_time`` will be truncated to the nearest - microsecond. - dispatch_time: - Output only. The time that this attempt was dispatched. - ``dispatch_time`` will be truncated to the nearest - microsecond. - response_time: - Output only. The time that this attempt response was received. - ``response_time`` will be truncated to the nearest - microsecond. - response_status: - Output only. The response from the worker for this attempt. - If ``response_time`` is unset, then the task has not been - attempted or is currently running and the ``response_status`` - field is meaningless. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.Attempt) - }, -) -_sym_db.RegisterMessage(Attempt) - - -DESCRIPTOR._options = None -_TASK._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/tasks_v2/proto/task_pb2_grpc.py b/google/cloud/tasks_v2/proto/task_pb2_grpc.py deleted file mode 100644 index 8a939394..00000000 --- a/google/cloud/tasks_v2/proto/task_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/tasks_v2/py.typed b/google/cloud/tasks_v2/py.typed new file mode 100644 index 00000000..41f0b1b8 --- /dev/null +++ b/google/cloud/tasks_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. diff --git a/google/cloud/tasks_v2/services/__init__.py b/google/cloud/tasks_v2/services/__init__.py new file mode 100644 index 00000000..42ffdf2b --- /dev/null +++ b/google/cloud/tasks_v2/services/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/google/__init__.py b/google/cloud/tasks_v2/services/cloud_tasks/__init__.py similarity index 71% rename from google/__init__.py rename to google/cloud/tasks_v2/services/cloud_tasks/__init__.py index 9a1b64a6..498f5941 100644 --- a/google/__init__.py +++ b/google/cloud/tasks_v2/services/cloud_tasks/__init__.py @@ -1,24 +1,24 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil +from .client import CloudTasksClient +from .async_client import CloudTasksAsyncClient - __path__ = pkgutil.extend_path(__path__, __name__) +__all__ = ( + "CloudTasksClient", + "CloudTasksAsyncClient", +) diff --git a/google/cloud/tasks_v2/services/cloud_tasks/async_client.py b/google/cloud/tasks_v2/services/cloud_tasks/async_client.py new file mode 100644 index 00000000..798f3396 --- /dev/null +++ b/google/cloud/tasks_v2/services/cloud_tasks/async_client.py @@ -0,0 +1,1729 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2.services.cloud_tasks import pagers +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import target +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport +from .client import CloudTasksClient + + +class CloudTasksAsyncClient: + """Cloud Tasks allows developers to manage the execution of + background work in their applications. + """ + + _client: CloudTasksClient + + DEFAULT_ENDPOINT = CloudTasksClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudTasksClient.DEFAULT_MTLS_ENDPOINT + + queue_path = staticmethod(CloudTasksClient.queue_path) + + task_path = staticmethod(CloudTasksClient.task_path) + + from_service_account_file = CloudTasksClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(CloudTasksClient).get_transport_class, type(CloudTasksClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, CloudTasksTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the cloud tasks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. 
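[editor's note] The constructor options documented in this docstring can be exercised directly. A small sketch, assuming application default credentials are available; the endpoint value shown is just the public default standing in for a custom override:

```python
from google.api_core.client_options import ClientOptions
from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksAsyncClient

# Default construction: application default credentials, the default
# endpoint, and the "grpc_asyncio" transport.
client = CloudTasksAsyncClient()

# Endpoint override via client_options, as described above; the value here
# is simply the public default, standing in for a custom endpoint.
client = CloudTasksAsyncClient(
    client_options=ClientOptions(api_endpoint="cloudtasks.googleapis.com"),
)
```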
If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = CloudTasksClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_queues( + self, + request: cloudtasks.ListQueuesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesAsyncPager: + r"""Lists queues. + Queues are returned in lexicographical order. + + Args: + request (:class:`~.cloudtasks.ListQueuesRequest`): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListQueuesAsyncPager: + Response message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ListQueuesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_queues, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListQueuesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_queue( + self, + request: cloudtasks.GetQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + Args: + request (:class:`~.cloudtasks.GetQueueRequest`): + The request object. 
Request message for + [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. + name (:class:`str`): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.GetQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_queue( + self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.CreateQueueRequest`): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. + parent (:class:`str`): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ queue (:class:`~.gct_queue.Queue`): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2.Queue.name] cannot + be the same as an existing queue. + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, queue]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.CreateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_queue( + self, + request: cloudtasks.UpdateQueueRequest = None, + *, + queue: gct_queue.Queue = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.UpdateQueueRequest`): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. + queue (:class:`~.gct_queue.Queue`): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2.Queue.name] + must be specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2.Queue.name] cannot be + changed. + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`~.field_mask.FieldMask`): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([queue, update_mask]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.UpdateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("queue.name", request.queue.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_queue( + self, + request: cloudtasks.DeleteQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.DeleteQueueRequest`): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
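# Sketch of the calling convention this check enforces, using the flattened
# ``update_queue`` arguments documented above.  Assumes the proto-plus types in
# ``types/queue.py``; all resource names are placeholders.

import asyncio

from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksAsyncClient
from google.cloud.tasks_v2.types import queue as queue_types
from google.protobuf import field_mask_pb2


async def raise_dispatch_rate() -> None:
    client = CloudTasksAsyncClient()
    queue = queue_types.Queue(
        name="projects/my-project/locations/us-central1/queues/my-queue",
        rate_limits=queue_types.RateLimits(max_dispatches_per_second=10),
    )
    mask = field_mask_pb2.FieldMask(paths=["rate_limits.max_dispatches_per_second"])

    # Flattened fields and the ``request=`` object are mutually exclusive:
    # supplying both raises the ValueError shown in this diff.
    await client.update_queue(queue=queue, update_mask=mask)


asyncio.run(raise_dispatch_rate())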
+ if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.DeleteQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def purge_queue( + self, + request: cloudtasks.PurgeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Args: + request (:class:`~.cloudtasks.PurgeQueueRequest`): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.PurgeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.purge_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def pause_queue( + self, + request: cloudtasks.PauseQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2.Queue.state] is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + Args: + request (:class:`~.cloudtasks.PauseQueueRequest`): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.PauseQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def resume_queue( + self, + request: cloudtasks.ResumeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2.Queue.state]; after calling this + method it will be set to + [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. 
If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Args: + request (:class:`~.cloudtasks.ResumeQueueRequest`): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ResumeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if + the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. 
+ It is used to specify access control policies for Cloud + Platform resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([resource]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + elif not request: + request = iam_policy.GetIamPolicyRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
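# A short usage sketch for the flattened ``get_iam_policy`` call above; the
# queue path is a placeholder and the caller needs ``cloudtasks.queues.getIamPolicy``.

import asyncio

from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksAsyncClient


async def show_queue_policy() -> None:
    client = CloudTasksAsyncClient()
    resource = CloudTasksAsyncClient.queue_path("my-project", "us-central1", "my-queue")
    policy = await client.get_iam_policy(resource=resource)
    for binding in policy.bindings:
        print(binding.role, list(binding.members))


asyncio.run(show_queue_policy())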
+ return response + + async def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([resource]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + elif not request: + request = iam_policy.SetIamPolicyRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2.Queue]. If the resource does not + exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`Sequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([resource, permissions]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
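# Because ``TestIamPermissionsRequest`` is a plain protobuf type rather than a
# proto-plus wrapper, a dict request is expanded into keyword arguments exactly
# as below.  Sketch with placeholder resource and permission names.

import asyncio

from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksAsyncClient


async def check_permissions() -> None:
    client = CloudTasksAsyncClient()
    resource = CloudTasksAsyncClient.queue_path("my-project", "us-central1", "my-queue")
    response = await client.test_iam_permissions(
        request={
            "resource": resource,
            "permissions": ["cloudtasks.tasks.create", "cloudtasks.tasks.delete"],
        }
    )
    print(list(response.permissions))


asyncio.run(check_permissions())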
+ if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + elif not request: + request = iam_policy.TestIamPermissionsRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_tasks( + self, + request: cloudtasks.ListTasksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksAsyncPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + due to performance considerations; + [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Args: + request (:class:`~.cloudtasks.ListTasksRequest`): + The request object. Request message for listing tasks + using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTasksAsyncPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tasks, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTasksAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_task( + self, + request: cloudtasks.GetTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + Args: + request (:class:`~.cloudtasks.GetTaskRequest`): + The request object. Request message for getting a task + using + [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
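# Sketch of consuming the ``ListTasksAsyncPager`` returned by ``list_tasks``
# above; additional pages are fetched transparently during iteration.
# Resource IDs are placeholders.

import asyncio

from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksAsyncClient


async def print_task_names() -> None:
    client = CloudTasksAsyncClient()
    parent = CloudTasksAsyncClient.queue_path("my-project", "us-central1", "my-queue")
    pager = await client.list_tasks(parent=parent)
    async for task in pager:
        # Only the BASIC view is returned unless ``response_view`` is set on the request.
        print(task.name)


asyncio.run(print_task_names())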
+ return response + + async def create_task( + self, + request: cloudtasks.CreateTaskRequest = None, + *, + parent: str = None, + task: gct_task.Task = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Args: + request (:class:`~.cloudtasks.CreateTaskRequest`): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`~.gct_task.Task`): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2.Task.name]. If a name is + not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or executed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or executed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + executed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, task]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = cloudtasks.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_task, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_task( + self, + request: cloudtasks.DeleteTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Args: + request (:class:`~.cloudtasks.DeleteTaskRequest`): + The request object. Request message for deleting a task + using + [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def run_task( + self, + request: cloudtasks.RunTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. 
+ + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can + be used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will + be reset to the time that + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called + plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Args: + request (:class:`~.cloudtasks.RunTaskRequest`): + The request object. Request message for forcing a task + to run now using + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_task, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
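# Sketch combining ``create_task`` and ``run_task``; assumes the ``Task``,
# ``HttpRequest`` and ``HttpMethod`` types from ``types/task.py`` and
# ``types/target.py``.  The URL and resource IDs are placeholders.

import asyncio

from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksAsyncClient
from google.cloud.tasks_v2.types import target
from google.cloud.tasks_v2.types import task as task_types


async def create_and_force_run() -> None:
    client = CloudTasksAsyncClient()
    parent = CloudTasksAsyncClient.queue_path("my-project", "us-central1", "my-queue")
    new_task = task_types.Task(
        http_request=target.HttpRequest(
            http_method=target.HttpMethod.POST,
            url="https://example.com/task_handler",
        )
    )
    created = await client.create_task(parent=parent, task=new_task)
    # Dispatch immediately instead of waiting for the schedule time.
    dispatched = await client.run_task(name=created.name)
    print(dispatched.name)


asyncio.run(create_and_force_run())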
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-tasks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("CloudTasksAsyncClient",) diff --git a/google/cloud/tasks_v2/services/cloud_tasks/client.py b/google/cloud/tasks_v2/services/cloud_tasks/client.py new file mode 100644 index 00000000..00b5bb18 --- /dev/null +++ b/google/cloud/tasks_v2/services/cloud_tasks/client.py @@ -0,0 +1,1837 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import os +import re +from typing import Callable, Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2.services.cloud_tasks import pagers +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import target +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import CloudTasksGrpcTransport +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +class CloudTasksClientMeta(type): + """Metaclass for the CloudTasks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] + _transport_registry["grpc"] = CloudTasksGrpcTransport + _transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[CloudTasksTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
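# Sketch of the transport selection this metaclass provides; "grpc" is the first
# entry in the registry above and therefore the default for the synchronous client.

from google.cloud.tasks_v2.services.cloud_tasks.client import CloudTasksClient

sync_client = CloudTasksClient(transport="grpc")
async_transport_cls = CloudTasksClient.get_transport_class("grpc_asyncio")
print(async_transport_cls.__name__)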
+ if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudTasksClient(metaclass=CloudTasksClientMeta): + """Cloud Tasks allows developers to manage the execution of + background work in their applications. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudtasks.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {@api.name}: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @staticmethod + def queue_path(project: str, location: str, queue: str,) -> str: + """Return a fully-qualified queue string.""" + return "projects/{project}/locations/{location}/queues/{queue}".format( + project=project, location=location, queue=queue, + ) + + @staticmethod + def parse_queue_path(path: str) -> Dict[str, str]: + """Parse a queue path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def task_path(project: str, location: str, queue: str, task: str,) -> str: + """Return a fully-qualified task string.""" + return "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format( + project=project, location=location, queue=queue, task=task, + ) + + @staticmethod + def parse_task_path(path: str) -> Dict[str, str]: + """Parse a task path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)/tasks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, CloudTasksTransport] = None, + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the cloud tasks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + if client_options.api_endpoint is None: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + if use_mtls_env == "never": + client_options.api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + has_client_cert_source = ( + client_options.client_cert_source is not None + or mtls.has_default_client_cert_source() + ) + client_options.api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT + if has_client_cert_source + else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudTasksTransport): + # transport is a CloudTasksTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=client_options.api_endpoint, + scopes=client_options.scopes, + api_mtls_endpoint=client_options.api_endpoint, + client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_queues( + self, + request: cloudtasks.ListQueuesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesPager: + r"""Lists queues. + Queues are returned in lexicographical order. + + Args: + request (:class:`~.cloudtasks.ListQueuesRequest`): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListQueuesPager: + Response message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListQueuesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListQueuesRequest): + request = cloudtasks.ListQueuesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_queues] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQueuesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_queue( + self, + request: cloudtasks.GetQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. 
+ + Args: + request (:class:`~.cloudtasks.GetQueueRequest`): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. + name (:class:`str`): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetQueueRequest): + request = cloudtasks.GetQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_queue( + self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.CreateQueueRequest`): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. + parent (:class:`str`): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. 
+ This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (:class:`~.gct_queue.Queue`): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2.Queue.name] cannot + be the same as an existing queue. + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateQueueRequest): + request = cloudtasks.CreateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_queue( + self, + request: cloudtasks.UpdateQueueRequest = None, + *, + queue: gct_queue.Queue = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.UpdateQueueRequest`): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. + queue (:class:`~.gct_queue.Queue`): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2.Queue.name] + must be specified. 
+ + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2.Queue.name] cannot be + changed. + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.UpdateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.UpdateQueueRequest): + request = cloudtasks.UpdateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("queue.name", request.queue.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_queue( + self, + request: cloudtasks.DeleteQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.DeleteQueueRequest`): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. + name (:class:`str`): + Required. The queue name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteQueueRequest): + request = cloudtasks.DeleteQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def purge_queue( + self, + request: cloudtasks.PurgeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Args: + request (:class:`~.cloudtasks.PurgeQueueRequest`): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
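+        # Illustrative sketch of the rule enforced below (queue names are
+        # placeholders): pass either a request object or flattened fields,
+        # never both.
+        #
+        #   client.purge_queue(name="projects/my-project/locations/us-central1/queues/my-queue")  # OK
+        #   client.purge_queue(request=cloudtasks.PurgeQueueRequest(name=queue_name))             # OK
+        #   client.purge_queue(request=some_request, name=queue_name)                             # ValueError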
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PurgeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PurgeQueueRequest): + request = cloudtasks.PurgeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def pause_queue( + self, + request: cloudtasks.PauseQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2.Queue.state] is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + Args: + request (:class:`~.cloudtasks.PauseQueueRequest`): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PauseQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PauseQueueRequest): + request = cloudtasks.PauseQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
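+        # Illustrative usage (identifiers are placeholders):
+        #
+        #   paused = client.pause_queue(name=queue_name)    # queue state becomes PAUSED
+        #   resumed = client.resume_queue(name=queue_name)  # queue state becomes RUNNING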
+ + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def resume_queue( + self, + request: cloudtasks.ResumeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2.Queue.state]; after calling this + method it will be set to + [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Args: + request (:class:`~.cloudtasks.ResumeQueueRequest`): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ResumeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ResumeQueueRequest): + request = cloudtasks.ResumeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_queue] + + # Certain fields should be provided within the metadata header; + # add these here. 
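+        # Illustrative note: the call below attaches an ``x-goog-request-params``
+        # metadata entry of the form ``name=<queue resource name>`` so the
+        # service can route the request to the correct queue.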
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if + the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
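+        # Illustrative usage (the resource name is a placeholder):
+        #
+        #   policy = client.get_iam_policy(resource=queue_name)
+        #   # IAM requests are plain protobuf messages rather than proto-plus
+        #   # types, so a dict is also accepted and expanded as keyword
+        #   # arguments (see the isinstance check below):
+        #   policy = client.get_iam_policy(request={"resource": queue_name})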
+ has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + elif not request: + request = iam_policy.GetIamPolicyRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + elif not request: + request = iam_policy.SetIamPolicyRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2.Queue]. If the resource does not + exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. 
+ This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`Sequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + elif not request: + request = iam_policy.TestIamPermissionsRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_tasks( + self, + request: cloudtasks.ListTasksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + due to performance considerations; + [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Args: + request (:class:`~.cloudtasks.ListTasksRequest`): + The request object. Request message for listing tasks + using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTasksPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListTasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListTasksRequest): + request = cloudtasks.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTasksPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_task( + self, + request: cloudtasks.GetTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + Args: + request (:class:`~.cloudtasks.GetTaskRequest`): + The request object. Request message for getting a task + using + [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
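+        # Illustrative note: an existing ``GetTaskRequest`` is sent as-is,
+        # while a dict (or other mapping) is coerced below into a new request
+        # object, e.g. ``client.get_task(request={"name": task_name})`` where
+        # ``task_name`` is a placeholder.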
+ if not isinstance(request, cloudtasks.GetTaskRequest): + request = cloudtasks.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_task( + self, + request: cloudtasks.CreateTaskRequest = None, + *, + parent: str = None, + task: gct_task.Task = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Args: + request (:class:`~.cloudtasks.CreateTaskRequest`): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`~.gct_task.Task`): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2.Task.name]. If a name is + not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or executed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or executed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + executed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. 
+ This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateTaskRequest): + request = cloudtasks.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_task( + self, + request: cloudtasks.DeleteTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Args: + request (:class:`~.cloudtasks.DeleteTaskRequest`): + The request object. Request message for deleting a task + using + [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteTaskRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteTaskRequest): + request = cloudtasks.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def run_task( + self, + request: cloudtasks.RunTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can + be used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will + be reset to the time that + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called + plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Args: + request (:class:`~.cloudtasks.RunTaskRequest`): + The request object. Request message for forcing a task + to run now using + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.RunTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.RunTaskRequest): + request = cloudtasks.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-tasks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("CloudTasksClient",) diff --git a/google/cloud/tasks_v2/services/cloud_tasks/pagers.py b/google/cloud/tasks_v2/services/cloud_tasks/pagers.py new file mode 100644 index 00000000..466a8d25 --- /dev/null +++ b/google/cloud/tasks_v2/services/cloud_tasks/pagers.py @@ -0,0 +1,278 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import task + + +class ListQueuesPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`~.cloudtasks.ListQueuesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`~.cloudtasks.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudtasks.ListQueuesResponse], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloudtasks.ListQueuesRequest`): + The initial request object. + response (:class:`~.cloudtasks.ListQueuesResponse`): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[queue.Queue]: + for page in self.pages: + yield from page.queues + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListQueuesAsyncPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`~.cloudtasks.ListQueuesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`~.cloudtasks.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudtasks.ListQueuesResponse]], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloudtasks.ListQueuesRequest`): + The initial request object. + response (:class:`~.cloudtasks.ListQueuesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[queue.Queue]: + async def async_generator(): + async for page in self.pages: + for response in page.queues: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTasksPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`~.cloudtasks.ListTasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`~.cloudtasks.ListTasksResponse` + attributes are available on the pager. 
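A sketch of how the sync pager above is consumed (editorial, not from the patch): results can be walked item by item, or page by page through the ``pages`` property. The parent path is a placeholder and default credentials are assumed.

from google.cloud import tasks_v2

client = tasks_v2.CloudTasksClient()
parent = "projects/my-project/locations/us-central1"

# Item-wise iteration: the pager fetches follow-up pages transparently.
for q in client.list_queues(parent=parent):
    print(q.name)

# Page-wise iteration: inspect each ListQueuesResponse as it arrives.
for page in client.list_queues(parent=parent).pages:
    print(len(page.queues))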
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudtasks.ListTasksResponse], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloudtasks.ListTasksRequest`): + The initial request object. + response (:class:`~.cloudtasks.ListTasksResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[task.Task]: + for page in self.pages: + yield from page.tasks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTasksAsyncPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`~.cloudtasks.ListTasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`~.cloudtasks.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudtasks.ListTasksResponse]], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloudtasks.ListTasksRequest`): + The initial request object. + response (:class:`~.cloudtasks.ListTasksResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
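The async pagers are consumed with ``async for``. A short sketch under stated assumptions (the async client import path, the placeholder parent path, and available default credentials); this is editorial and not part of the generated code.

import asyncio

from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksAsyncClient


async def print_task_names():
    client = CloudTasksAsyncClient()
    parent = "projects/my-project/locations/us-central1/queues/my-queue"
    # ``list_tasks`` is a coroutine returning the async pager; ``async for``
    # then walks every task across all pages.
    async for t in await client.list_tasks(parent=parent):
        print(t.name)


asyncio.run(print_task_names())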
+ """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[task.Task]: + async def async_generator(): + async for page in self.pages: + for response in page.tasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py b/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py new file mode 100644 index 00000000..72f33c1b --- /dev/null +++ b/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudTasksTransport +from .grpc import CloudTasksGrpcTransport +from .grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] +_transport_registry["grpc"] = CloudTasksGrpcTransport +_transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport + + +__all__ = ( + "CloudTasksTransport", + "CloudTasksGrpcTransport", + "CloudTasksGrpcAsyncIOTransport", +) diff --git a/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py b/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py new file mode 100644 index 00000000..0fc574a0 --- /dev/null +++ b/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py @@ -0,0 +1,397 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import abc +import typing +import pkg_resources + +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore + +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-tasks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class CloudTasksTransport(abc.ABC): + """Abstract transport class for CloudTasks.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages(client_info) + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
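A small sketch of the credential handling in the base constructor above (editorial, not part of the patch): supplying both ``credentials`` and ``credentials_file`` trips the mutual-exclusion check, so the placeholder file path below is never opened.

from google.api_core import exceptions
from google.auth import credentials as ga_credentials

from google.cloud.tasks_v2.services.cloud_tasks.transports import CloudTasksGrpcTransport

try:
    CloudTasksGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        credentials_file="service-account.json",  # placeholder path, never read
    )
except exceptions.DuplicateCredentialArgs as exc:
    print(exc)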
+ self._wrapped_methods = { + self.list_queues: gapic_v1.method.wrap_method( + self.list_queues, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.get_queue: gapic_v1.method.wrap_method( + self.get_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.create_queue: gapic_v1.method.wrap_method( + self.create_queue, default_timeout=10.0, client_info=client_info, + ), + self.update_queue: gapic_v1.method.wrap_method( + self.update_queue, default_timeout=10.0, client_info=client_info, + ), + self.delete_queue: gapic_v1.method.wrap_method( + self.delete_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.purge_queue: gapic_v1.method.wrap_method( + self.purge_queue, default_timeout=10.0, client_info=client_info, + ), + self.pause_queue: gapic_v1.method.wrap_method( + self.pause_queue, default_timeout=10.0, client_info=client_info, + ), + self.resume_queue: gapic_v1.method.wrap_method( + self.resume_queue, default_timeout=10.0, client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, default_timeout=10.0, client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.list_tasks: gapic_v1.method.wrap_method( + self.list_tasks, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.get_task: gapic_v1.method.wrap_method( + self.get_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.create_task: gapic_v1.method.wrap_method( + self.create_task, default_timeout=10.0, client_info=client_info, + ), + self.delete_task: gapic_v1.method.wrap_method( + self.delete_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.run_task: gapic_v1.method.wrap_method( + self.run_task, default_timeout=10.0, client_info=client_info, + ), + } + + 
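The wrapped methods above give each RPC its default retry and timeout; callers can override them per call with the same shape. A sketch assuming a placeholder parent path and Application Default Credentials (editorial, not from the patch).

from google.api_core import exceptions
from google.api_core import retry as retries

from google.cloud import tasks_v2

client = tasks_v2.CloudTasksClient()

# Same shape as the generated defaults: exponential backoff on UNAVAILABLE
# and DEADLINE_EXCEEDED, capped by an overall 10 s timeout.
custom_retry = retries.Retry(
    initial=0.1,
    maximum=10.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
    ),
)

queues = list(
    client.list_queues(
        parent="projects/my-project/locations/us-central1",
        retry=custom_retry,
        timeout=10.0,
    )
)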
@property + def list_queues( + self, + ) -> typing.Callable[ + [cloudtasks.ListQueuesRequest], + typing.Union[ + cloudtasks.ListQueuesResponse, + typing.Awaitable[cloudtasks.ListQueuesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_queue( + self, + ) -> typing.Callable[ + [cloudtasks.GetQueueRequest], + typing.Union[queue.Queue, typing.Awaitable[queue.Queue]], + ]: + raise NotImplementedError() + + @property + def create_queue( + self, + ) -> typing.Callable[ + [cloudtasks.CreateQueueRequest], + typing.Union[gct_queue.Queue, typing.Awaitable[gct_queue.Queue]], + ]: + raise NotImplementedError() + + @property + def update_queue( + self, + ) -> typing.Callable[ + [cloudtasks.UpdateQueueRequest], + typing.Union[gct_queue.Queue, typing.Awaitable[gct_queue.Queue]], + ]: + raise NotImplementedError() + + @property + def delete_queue( + self, + ) -> typing.Callable[ + [cloudtasks.DeleteQueueRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def purge_queue( + self, + ) -> typing.Callable[ + [cloudtasks.PurgeQueueRequest], + typing.Union[queue.Queue, typing.Awaitable[queue.Queue]], + ]: + raise NotImplementedError() + + @property + def pause_queue( + self, + ) -> typing.Callable[ + [cloudtasks.PauseQueueRequest], + typing.Union[queue.Queue, typing.Awaitable[queue.Queue]], + ]: + raise NotImplementedError() + + @property + def resume_queue( + self, + ) -> typing.Callable[ + [cloudtasks.ResumeQueueRequest], + typing.Union[queue.Queue, typing.Awaitable[queue.Queue]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.GetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.SetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> typing.Callable[ + [iam_policy.TestIamPermissionsRequest], + typing.Union[ + iam_policy.TestIamPermissionsResponse, + typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_tasks( + self, + ) -> typing.Callable[ + [cloudtasks.ListTasksRequest], + typing.Union[ + cloudtasks.ListTasksResponse, typing.Awaitable[cloudtasks.ListTasksResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_task( + self, + ) -> typing.Callable[ + [cloudtasks.GetTaskRequest], + typing.Union[task.Task, typing.Awaitable[task.Task]], + ]: + raise NotImplementedError() + + @property + def create_task( + self, + ) -> typing.Callable[ + [cloudtasks.CreateTaskRequest], + typing.Union[gct_task.Task, typing.Awaitable[gct_task.Task]], + ]: + raise NotImplementedError() + + @property + def delete_task( + self, + ) -> typing.Callable[ + [cloudtasks.DeleteTaskRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def run_task( + self, + ) -> typing.Callable[ + [cloudtasks.RunTaskRequest], + typing.Union[task.Task, typing.Awaitable[task.Task]], + ]: + raise NotImplementedError() + + +__all__ = ("CloudTasksTransport",) diff --git a/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py b/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py new file mode 100644 index 00000000..87b1c708 --- /dev/null +++ 
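Both concrete transports implement every abstract RPC property declared above, which is what lets the client stay transport-agnostic. A trivial editorial check, not part of the generated code.

from google.cloud.tasks_v2.services.cloud_tasks.transports import (
    CloudTasksGrpcAsyncIOTransport,
    CloudTasksGrpcTransport,
    CloudTasksTransport,
)

# Each subclass overrides the abstract RPC properties (list_queues, run_task, ...).
assert issubclass(CloudTasksGrpcTransport, CloudTasksTransport)
assert issubclass(CloudTasksGrpcAsyncIOTransport, CloudTasksTransport)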
b/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py @@ -0,0 +1,755 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + + +import grpc # type: ignore + +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO + + +class CloudTasksGrpcTransport(CloudTasksTransport): + """gRPC backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. 
If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} # type: Dict[str, Callable] + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + @classmethod + def create_channel( + cls, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + address (Optionsl[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
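A mutual TLS sketch based on the constructor branch above (editorial, not from the patch): a ``client_cert_source`` callback returns PEM certificate and key bytes. The certificate paths are placeholders, the mTLS hostname is an assumption, and Application Default Credentials are required.

from google.cloud import tasks_v2
from google.cloud.tasks_v2.services.cloud_tasks.transports import CloudTasksGrpcTransport


def client_cert_source():
    # Placeholder paths; both files must contain PEM-encoded data.
    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
        return cert.read(), key.read()


transport = CloudTasksGrpcTransport(
    api_mtls_endpoint="cloudtasks.mtls.googleapis.com",  # assumed mTLS endpoint
    client_cert_source=client_cert_source,
)
client = tasks_v2.CloudTasksClient(transport=transport)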
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def list_queues( + self, + ) -> Callable[[cloudtasks.ListQueuesRequest], cloudtasks.ListQueuesResponse]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + ~.ListQueuesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_queues" not in self._stubs: + self._stubs["list_queues"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/ListQueues", + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs["list_queues"] + + @property + def get_queue(self) -> Callable[[cloudtasks.GetQueueRequest], queue.Queue]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_queue" not in self._stubs: + self._stubs["get_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/GetQueue", + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["get_queue"] + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], gct_queue.Queue]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
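A channel built with ``create_channel`` can be handed straight to the transport, which then skips credential resolution and reuses it for every stub. Sketch only; the channel itself still needs Application Default Credentials.

from google.cloud import tasks_v2
from google.cloud.tasks_v2.services.cloud_tasks.transports import CloudTasksGrpcTransport

# Falls back to Application Default Credentials for the channel.
channel = CloudTasksGrpcTransport.create_channel("cloudtasks.googleapis.com:443")

# The explicit channel is used as-is; no further credential lookup happens.
transport = CloudTasksGrpcTransport(channel=channel)
client = tasks_v2.CloudTasksClient(transport=transport)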
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_queue" not in self._stubs: + self._stubs["create_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/CreateQueue", + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["create_queue"] + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], gct_queue.Queue]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_queue" not in self._stubs: + self._stubs["update_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/UpdateQueue", + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["update_queue"] + + @property + def delete_queue(self) -> Callable[[cloudtasks.DeleteQueueRequest], empty.Empty]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_queue" not in self._stubs: + self._stubs["delete_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/DeleteQueue", + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_queue"] + + @property + def purge_queue(self) -> Callable[[cloudtasks.PurgeQueueRequest], queue.Queue]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. 
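An ``update_queue`` sketch (editorial, not from the patch): the queue resource is sent together with a field mask naming the fields to change. The queue path and rate limit value are placeholders, the flattened ``queue``/``update_mask`` arguments are an assumption, and default credentials are assumed.

from google.protobuf import field_mask_pb2

from google.cloud import tasks_v2

client = tasks_v2.CloudTasksClient()

queue = tasks_v2.Queue(
    name="projects/my-project/locations/us-central1/queues/my-queue",
    rate_limits=tasks_v2.RateLimits(max_dispatches_per_second=5),
)

# Only the fields named in the mask are changed on the server.
updated = client.update_queue(
    queue=queue,
    update_mask=field_mask_pb2.FieldMask(paths=["rate_limits"]),
)
print(updated.rate_limits.max_dispatches_per_second)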
+ + Returns: + Callable[[~.PurgeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_queue" not in self._stubs: + self._stubs["purge_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/PurgeQueue", + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["purge_queue"] + + @property + def pause_queue(self) -> Callable[[cloudtasks.PauseQueueRequest], queue.Queue]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2.Queue.state] is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_queue" not in self._stubs: + self._stubs["pause_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/PauseQueue", + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["pause_queue"] + + @property + def resume_queue(self) -> Callable[[cloudtasks.ResumeQueueRequest], queue.Queue]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2.Queue.state]; after calling this + method it will be set to + [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_queue" not in self._stubs: + self._stubs["resume_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/ResumeQueue", + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["resume_queue"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if + the resource exists and does not have a policy set. 
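Pausing and resuming a queue through the client, as described by the docstrings above. A sketch with placeholder identifiers and default credentials assumed; not part of the generated sources.

from google.cloud import tasks_v2

client = tasks_v2.CloudTasksClient()
queue_name = client.queue_path("my-project", "us-central1", "my-queue")

# After PauseQueue the queue state is PAUSED; tasks can still be added.
paused = client.pause_queue(name=queue_name)
assert paused.state == tasks_v2.Queue.State.PAUSED

# ResumeQueue flips it back to RUNNING (mind the 500/50/5 scaling guidance).
resumed = client.resume_queue(name=queue_name)
assert resumed.state == tasks_v2.Queue.State.RUNNING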
+ + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2.Queue]. If the resource does not + exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
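Reading a queue-level policy and probing a single permission, per the IAM methods above. Editorial sketch: the resource path and permission string are placeholders and default credentials are assumed.

from google.cloud import tasks_v2

client = tasks_v2.CloudTasksClient()
resource = "projects/my-project/locations/us-central1/queues/my-queue"

# Requires cloudtasks.queues.getIamPolicy on the resource parent.
policy = client.get_iam_policy(resource=resource)
print(policy.bindings)

# "Fail open" permission probe, suitable for UIs rather than auth checks.
response = client.test_iam_permissions(
    resource=resource, permissions=["cloudtasks.tasks.create"],
)
print(response.permissions)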
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def list_tasks( + self, + ) -> Callable[[cloudtasks.ListTasksRequest], cloudtasks.ListTasksResponse]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + due to performance considerations; + [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + ~.ListTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tasks" not in self._stubs: + self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/ListTasks", + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs["list_tasks"] + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], task.Task]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_task" not in self._stubs: + self._stubs["get_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/GetTask", + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["get_task"] + + @property + def create_task(self) -> Callable[[cloudtasks.CreateTaskRequest], gct_task.Task]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Returns: + Callable[[~.CreateTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_task" not in self._stubs: + self._stubs["create_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/CreateTask", + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs["create_task"] + + @property + def delete_task(self) -> Callable[[cloudtasks.DeleteTaskRequest], empty.Empty]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + A task can be deleted if it is scheduled or dispatched. 
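Creating an HTTP task via the flattened ``parent``/``task`` arguments, with the task supplied as a dict. A sketch with placeholder identifiers and URL; default credentials assumed; not part of the generated code.

from google.cloud import tasks_v2

client = tasks_v2.CloudTasksClient()
parent = client.queue_path("my-project", "us-central1", "my-queue")

# Tasks are immutable after creation; there is no UpdateTask call.
task = client.create_task(
    parent=parent,
    task={
        "http_request": {
            "http_method": tasks_v2.HttpMethod.POST,
            "url": "https://example.com/task_handler",
            "body": b"payload",
        }
    },
)
print(task.name)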
+ A task cannot be deleted if it has executed successfully + or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_task" not in self._stubs: + self._stubs["delete_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/DeleteTask", + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_task"] + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], task.Task]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can + be used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will + be reset to the time that + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called + plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Returns: + Callable[[~.RunTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_task" not in self._stubs: + self._stubs["run_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/RunTask", + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["run_task"] + + +__all__ = ("CloudTasksGrpcTransport",) diff --git a/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py b/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py new file mode 100644 index 00000000..450aeb0c --- /dev/null +++ b/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py @@ -0,0 +1,767 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .grpc import CloudTasksGrpcTransport + + +class CloudTasksGrpcAsyncIOTransport(CloudTasksTransport): + """gRPC AsyncIO backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. 
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def list_queues( + self, + ) -> Callable[ + [cloudtasks.ListQueuesRequest], Awaitable[cloudtasks.ListQueuesResponse] + ]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + Awaitable[~.ListQueuesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_queues" not in self._stubs: + self._stubs["list_queues"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/ListQueues", + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs["list_queues"] + + @property + def get_queue( + self, + ) -> Callable[[cloudtasks.GetQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_queue" not in self._stubs: + self._stubs["get_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/GetQueue", + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["get_queue"] + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], Awaitable[gct_queue.Queue]]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. 
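The AsyncIO transport above backs the async client, whose methods are awaitable. A sketch under assumptions (async client import path, placeholder queue path, Application Default Credentials); editorial only.

import asyncio

from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksAsyncClient


async def show_queue_state():
    client = CloudTasksAsyncClient()
    # Unary calls return coroutines on the async surface.
    queue = await client.get_queue(
        name="projects/my-project/locations/us-central1/queues/my-queue"
    )
    print(queue.state)


asyncio.run(show_queue_state())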
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_queue" not in self._stubs: + self._stubs["create_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/CreateQueue", + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["create_queue"] + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], Awaitable[gct_queue.Queue]]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_queue" not in self._stubs: + self._stubs["update_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/UpdateQueue", + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["update_queue"] + + @property + def delete_queue( + self, + ) -> Callable[[cloudtasks.DeleteQueueRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_queue" not in self._stubs: + self._stubs["delete_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/DeleteQueue", + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_queue"] + + @property + def purge_queue( + self, + ) -> Callable[[cloudtasks.PurgeQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. 
Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_queue" not in self._stubs: + self._stubs["purge_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/PurgeQueue", + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["purge_queue"] + + @property + def pause_queue( + self, + ) -> Callable[[cloudtasks.PauseQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2.Queue.state] is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_queue" not in self._stubs: + self._stubs["pause_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/PauseQueue", + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["pause_queue"] + + @property + def resume_queue( + self, + ) -> Callable[[cloudtasks.ResumeQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2.Queue.state]; after calling this + method it will be set to + [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_queue" not in self._stubs: + self._stubs["resume_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/ResumeQueue", + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["resume_queue"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. 
+ + Gets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if + the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], + Awaitable[iam_policy.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2.Queue]. If the resource does not + exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def list_tasks( + self, + ) -> Callable[ + [cloudtasks.ListTasksRequest], Awaitable[cloudtasks.ListTasksResponse] + ]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + due to performance considerations; + [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + Awaitable[~.ListTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tasks" not in self._stubs: + self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/ListTasks", + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs["list_tasks"] + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], Awaitable[task.Task]]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_task" not in self._stubs: + self._stubs["get_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/GetTask", + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["get_task"] + + @property + def create_task( + self, + ) -> Callable[[cloudtasks.CreateTaskRequest], Awaitable[gct_task.Task]]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Returns: + Callable[[~.CreateTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_task" not in self._stubs: + self._stubs["create_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/CreateTask", + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs["create_task"] + + @property + def delete_task( + self, + ) -> Callable[[cloudtasks.DeleteTaskRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_task" not in self._stubs: + self._stubs["delete_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/DeleteTask", + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_task"] + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], Awaitable[task.Task]]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can + be used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will + be reset to the time that + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called + plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Returns: + Callable[[~.RunTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "run_task" not in self._stubs: + self._stubs["run_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/RunTask", + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["run_task"] + + +__all__ = ("CloudTasksGrpcAsyncIOTransport",) diff --git a/google/cloud/tasks_v2/types.py b/google/cloud/tasks_v2/types.py deleted file mode 100644 index 3556d47f..00000000 --- a/google/cloud/tasks_v2/types.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.cloud.tasks_v2.proto import cloudtasks_pb2 -from google.cloud.tasks_v2.proto import queue_pb2 -from google.cloud.tasks_v2.proto import target_pb2 -from google.cloud.tasks_v2.proto import task_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import options_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import any_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 -from google.rpc import status_pb2 -from google.type import expr_pb2 - - -_shared_modules = [ - iam_policy_pb2, - options_pb2, - policy_pb2, - any_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, - expr_pb2, -] - -_local_modules = [ - cloudtasks_pb2, - queue_pb2, - target_pb2, - task_pb2, -] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.tasks_v2.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - -__all__ = tuple(sorted(names)) diff --git a/google/cloud/tasks_v2/types/__init__.py b/google/cloud/tasks_v2/types/__init__.py new file mode 100644 index 00000000..d7484fb9 --- /dev/null +++ b/google/cloud/tasks_v2/types/__init__.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .target import ( + HttpRequest, + AppEngineHttpRequest, + AppEngineRouting, + OAuthToken, + OidcToken, +) +from .queue import ( + Queue, + RateLimits, + RetryConfig, + StackdriverLoggingConfig, +) +from .task import ( + Task, + Attempt, +) +from .cloudtasks import ( + ListQueuesRequest, + ListQueuesResponse, + GetQueueRequest, + CreateQueueRequest, + UpdateQueueRequest, + DeleteQueueRequest, + PurgeQueueRequest, + PauseQueueRequest, + ResumeQueueRequest, + ListTasksRequest, + ListTasksResponse, + GetTaskRequest, + CreateTaskRequest, + DeleteTaskRequest, + RunTaskRequest, +) + + +__all__ = ( + "HttpRequest", + "AppEngineHttpRequest", + "AppEngineRouting", + "OAuthToken", + "OidcToken", + "Queue", + "RateLimits", + "RetryConfig", + "StackdriverLoggingConfig", + "Task", + "Attempt", + "ListQueuesRequest", + "ListQueuesResponse", + "GetQueueRequest", + "CreateQueueRequest", + "UpdateQueueRequest", + "DeleteQueueRequest", + "PurgeQueueRequest", + "PauseQueueRequest", + "ResumeQueueRequest", + "ListTasksRequest", + "ListTasksResponse", + "GetTaskRequest", + "CreateTaskRequest", + "DeleteTaskRequest", + "RunTaskRequest", +) diff --git a/google/cloud/tasks_v2/types/cloudtasks.py b/google/cloud/tasks_v2/types/cloudtasks.py new file mode 100644 index 00000000..aff86f04 --- /dev/null +++ b/google/cloud/tasks_v2/types/cloudtasks.py @@ -0,0 +1,478 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task as gct_task +from google.protobuf import field_mask_pb2 as field_mask # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2", + manifest={ + "ListQueuesRequest", + "ListQueuesResponse", + "GetQueueRequest", + "CreateQueueRequest", + "UpdateQueueRequest", + "DeleteQueueRequest", + "PurgeQueueRequest", + "PauseQueueRequest", + "ResumeQueueRequest", + "ListTasksRequest", + "ListTasksResponse", + "GetTaskRequest", + "CreateTaskRequest", + "DeleteTaskRequest", + "RunTaskRequest", + }, +) + + +class ListQueuesRequest(proto.Message): + r"""Request message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + + Attributes: + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + filter (str): + ``filter`` can be used to specify a subset of queues. Any + [Queue][google.cloud.tasks.v2.Queue] field can be used as a + filter and several operators as supported. For example: + ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as + described in `Stackdriver's Advanced Logs + Filters `__. + + Sample filter "state: PAUSED". + + Note that using filters might cause fewer queues than the + requested page_size to be returned. + page_size (int): + Requested page size. + + The maximum page size is 9800. If unspecified, the page size + will be the maximum. 
Fewer queues than requested might be + returned, even if more queues exist; use the + [next_page_token][google.cloud.tasks.v2.ListQueuesResponse.next_page_token] + in the response to determine if more queues exist. + page_token (str): + A token identifying the page of results to return. + + To request the first page results, page_token must be empty. + To request the next page of results, page_token must be the + value of + [next_page_token][google.cloud.tasks.v2.ListQueuesResponse.next_page_token] + returned from the previous call to + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] + method. It is an error to switch the value of the + [filter][google.cloud.tasks.v2.ListQueuesRequest.filter] + while iterating through pages. + """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListQueuesResponse(proto.Message): + r"""Response message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + + Attributes: + queues (Sequence[~.gct_queue.Queue]): + The list of queues. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] + with this value as the + [page_token][google.cloud.tasks.v2.ListQueuesRequest.page_token]. + + If the next_page_token is empty, there are no more results. + + The page token is valid for only 2 hours. + """ + + @property + def raw_page(self): + return self + + queues = proto.RepeatedField(proto.MESSAGE, number=1, message=gct_queue.Queue,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetQueueRequest(proto.Message): + r"""Request message for + [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. + + Attributes: + name (str): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreateQueueRequest(proto.Message): + r"""Request message for + [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. + + Attributes: + parent (str): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + queue (~.gct_queue.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2.Queue.name] cannot be + the same as an existing queue. + """ + + parent = proto.Field(proto.STRING, number=1) + + queue = proto.Field(proto.MESSAGE, number=2, message=gct_queue.Queue,) + + +class UpdateQueueRequest(proto.Message): + r"""Request message for + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. + + Attributes: + queue (~.gct_queue.Queue): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. Any + value specified for an output only field will be ignored. + The queue's [name][google.cloud.tasks.v2.Queue.name] cannot + be changed. + update_mask (~.field_mask.FieldMask): + A mask used to specify which fields of the + queue are being updated. + If empty, then all fields will be updated. 
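# Hypothetical partial-update sketch: only the paths named in update_mask are written,
# as the UpdateQueueRequest documentation above describes. The field chosen and the
# resource name are placeholders, not prescribed by this patch.
from google.cloud import tasks_v2
from google.cloud.tasks_v2.types import Queue, StackdriverLoggingConfig, UpdateQueueRequest
from google.protobuf import field_mask_pb2

client = tasks_v2.CloudTasksClient()
request = UpdateQueueRequest(
    queue=Queue(
        name="projects/my-project/locations/us-central1/queues/my-queue",
        stackdriver_logging_config=StackdriverLoggingConfig(sampling_ratio=0.1),
    ),
    update_mask=field_mask_pb2.FieldMask(
        paths=["stackdriver_logging_config.sampling_ratio"]
    ),
)
updated = client.update_queue(request=request)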
+ """ + + queue = proto.Field(proto.MESSAGE, number=1, message=gct_queue.Queue,) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class DeleteQueueRequest(proto.Message): + r"""Request message for + [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class PurgeQueueRequest(proto.Message): + r"""Request message for + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class PauseQueueRequest(proto.Message): + r"""Request message for + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class ResumeQueueRequest(proto.Message): + r"""Request message for + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListTasksRequest(proto.Message): + r"""Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2.Task] resource. + page_size (int): + Maximum page size. + + Fewer tasks than requested might be returned, even if more + tasks exist; use + [next_page_token][google.cloud.tasks.v2.ListTasksResponse.next_page_token] + in the response to determine if more tasks exist. + + The maximum page size is 1000. If unspecified, the page size + will be the maximum. + page_token (str): + A token identifying the page of results to return. + + To request the first page results, page_token must be empty. + To request the next page of results, page_token must be the + value of + [next_page_token][google.cloud.tasks.v2.ListTasksResponse.next_page_token] + returned from the previous call to + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] + method. + + The page token is valid for only 2 hours. + """ + + parent = proto.Field(proto.STRING, number=1) + + response_view = proto.Field(proto.ENUM, number=2, enum=gct_task.Task.View,) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListTasksResponse(proto.Message): + r"""Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. 
+ + Attributes: + tasks (Sequence[~.gct_task.Task]): + The list of tasks. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] with + this value as the + [page_token][google.cloud.tasks.v2.ListTasksRequest.page_token]. + + If the next_page_token is empty, there are no more results. + """ + + @property + def raw_page(self): + return self + + tasks = proto.RepeatedField(proto.MESSAGE, number=1, message=gct_task.Task,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetTaskRequest(proto.Message): + r"""Request message for getting a task using + [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2.Task] resource. + """ + + name = proto.Field(proto.STRING, number=1) + + response_view = proto.Field(proto.ENUM, number=2, enum=gct_task.Task.View,) + + +class CreateTaskRequest(proto.Message): + r"""Request message for + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + task (~.gct_task.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2.Task.name]. If a name is not + specified then the system will generate a random unique task + id, which will be set in the task returned in the + [response][google.cloud.tasks.v2.Task.name]. + + If [schedule_time][google.cloud.tasks.v2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set it to + the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task de-duplication. + If a task's ID is identical to that of an existing task or a + task that was deleted or executed recently then the call + will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour after + the original task was deleted or executed. If the task's + queue was created using queue.yaml or queue.xml, then + another task with the same name can't be created for ~9days + after the original task was deleted or executed. + + Because there is an extra lookup cost to identify duplicate + task names, these + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id is + recommended. 
Choosing task ids that are sequential or have + sequential prefixes, for example using a timestamp, causes + an increase in latency and error rates in all task commands. + The infrastructure relies on an approximately uniform + distribution of task ids to store and serve tasks + efficiently. + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2.Task] resource. + """ + + parent = proto.Field(proto.STRING, number=1) + + task = proto.Field(proto.MESSAGE, number=2, message=gct_task.Task,) + + response_view = proto.Field(proto.ENUM, number=3, enum=gct_task.Task.View,) + + +class DeleteTaskRequest(proto.Message): + r"""Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class RunTaskRequest(proto.Message): + r"""Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2.Task] resource. + """ + + name = proto.Field(proto.STRING, number=1) + + response_view = proto.Field(proto.ENUM, number=2, enum=gct_task.Task.View,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/tasks_v2/types/queue.py b/google/cloud/tasks_v2/types/queue.py new file mode 100644 index 00000000..cbc051bd --- /dev/null +++ b/google/cloud/tasks_v2/types/queue.py @@ -0,0 +1,394 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
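# Illustrative sketch of the CreateTask de-duplication guidance above (not part of
# this patch): hashing the logical id yields uniformly distributed task names. The
# queue path and handler URL are placeholders.
import hashlib

from google.cloud import tasks_v2
from google.cloud.tasks_v2.types import CreateTaskRequest, HttpRequest, Task

client = tasks_v2.CloudTasksClient()
parent = "projects/my-project/locations/us-central1/queues/my-queue"
task_id = hashlib.sha256(b"order-42").hexdigest()  # stable id for this unit of work

request = CreateTaskRequest(
    parent=parent,
    task=Task(
        name=f"{parent}/tasks/{task_id}",  # an explicit name enables de-duplication
        http_request=HttpRequest(  # http_method defaults to POST
            url="https://example.com/handler",
            body=b'{"order_id": 42}',
        ),
    ),
)
task = client.create_task(request=request)  # ALREADY_EXISTS if the name was used recently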
+# + +import proto # type: ignore + + +from google.cloud.tasks_v2.types import target +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2", + manifest={"Queue", "RateLimits", "RetryConfig", "StackdriverLoggingConfig",}, +) + + +class Queue(proto.Message): + r"""A queue is a container of related tasks. Queues are + configured to manage how those tasks are dispatched. + Configurable properties include rate limits, retry options, + queue types, and others. + + Attributes: + name (str): + Caller-specified and required in + [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue], + after which it becomes output only. + + The queue name. + + The queue name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the queue's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + app_engine_routing_override (~.target.AppEngineRouting): + Overrides for [task-level + app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. + These settings apply only to [App Engine + tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in this + queue. [Http tasks][google.cloud.tasks.v2.HttpRequest] are + not affected. + + If set, ``app_engine_routing_override`` is used for all [App + Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in + the queue, no matter what the setting is for the [task-level + app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. + rate_limits (~.queue.RateLimits): + Rate limits for task dispatches. + + [rate_limits][google.cloud.tasks.v2.Queue.rate_limits] and + [retry_config][google.cloud.tasks.v2.Queue.retry_config] are + related because they both control task attempts. However + they control task attempts in different ways: + + - [rate_limits][google.cloud.tasks.v2.Queue.rate_limits] + controls the total rate of dispatches from a queue (i.e. + all traffic dispatched from the queue, regardless of + whether the dispatch is from a first attempt or a retry). + - [retry_config][google.cloud.tasks.v2.Queue.retry_config] + controls what happens to particular a task after its + first attempt fails. That is, + [retry_config][google.cloud.tasks.v2.Queue.retry_config] + controls task retries (the second attempt, third attempt, + etc). + + The queue's actual dispatch rate is the result of: + + - Number of tasks in the queue + - User-specified throttling: + [rate_limits][google.cloud.tasks.v2.Queue.rate_limits], + [retry_config][google.cloud.tasks.v2.Queue.retry_config], + and the [queue's + state][google.cloud.tasks.v2.Queue.state]. + - System throttling due to ``429`` (Too Many Requests) or + ``503`` (Service Unavailable) responses from the worker, + high error rates, or to smooth sudden large traffic + spikes. + retry_config (~.queue.RetryConfig): + Settings that determine the retry behavior. 
+ + - For tasks created using Cloud Tasks: the queue-level + retry settings apply to all tasks in the queue that were + created using Cloud Tasks. Retry settings cannot be set + on individual tasks. + - For tasks created using the App Engine SDK: the + queue-level retry settings apply to all tasks in the + queue which do not have retry settings explicitly set on + the task and were created by the App Engine SDK. See `App + Engine + documentation `__. + state (~.queue.Queue.State): + Output only. The state of the queue. + + ``state`` can only be changed by called + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue], + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue], + or uploading + `queue.yaml/xml `__. + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] + cannot be used to change ``state``. + purge_time (~.timestamp.Timestamp): + Output only. The last time this queue was purged. + + All tasks that were + [created][google.cloud.tasks.v2.Task.create_time] before + this time were purged. + + A queue can be purged using + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue], + the `App Engine Task Queue SDK, or the Cloud + Console `__. + + Purge time will be truncated to the nearest microsecond. + Purge time will be unset if the queue has never been purged. + stackdriver_logging_config (~.queue.StackdriverLoggingConfig): + Configuration options for writing logs to `Stackdriver + Logging `__. If this + field is unset, then no logs are written. + """ + + class State(proto.Enum): + r"""State of the queue.""" + STATE_UNSPECIFIED = 0 + RUNNING = 1 + PAUSED = 2 + DISABLED = 3 + + name = proto.Field(proto.STRING, number=1) + + app_engine_routing_override = proto.Field( + proto.MESSAGE, number=2, message=target.AppEngineRouting, + ) + + rate_limits = proto.Field(proto.MESSAGE, number=3, message="RateLimits",) + + retry_config = proto.Field(proto.MESSAGE, number=4, message="RetryConfig",) + + state = proto.Field(proto.ENUM, number=5, enum=State,) + + purge_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + + stackdriver_logging_config = proto.Field( + proto.MESSAGE, number=9, message="StackdriverLoggingConfig", + ) + + +class RateLimits(proto.Message): + r"""Rate limits. + + This message determines the maximum rate that tasks can be + dispatched by a queue, regardless of whether the dispatch is a first + task attempt or a retry. + + Note: The debugging command, + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask], will run a task + even if the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits]. + + Attributes: + max_dispatches_per_second (float): + The maximum rate at which tasks are dispatched from this + queue. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + - The maximum allowed value is 500. + + This field has the same meaning as `rate in + queue.yaml/xml `__. + max_burst_size (int): + Output only. The max burst size. + + Max burst size limits how fast tasks in queue are processed + when many tasks are in the queue and the rate is high. This + field allows the queue to have a high rate so processing + starts shortly after a task is enqueued, but still limits + resource usage when many tasks are enqueued in a short + period of time. + + The `token + bucket `__ + algorithm is used to control the rate of task dispatches. + Each queue has a token bucket that holds tokens, up to the + maximum specified by ``max_burst_size``. 
Each time a task is + dispatched, a token is removed from the bucket. Tasks will + be dispatched until the queue's bucket runs out of tokens. + The bucket will be continuously refilled with new tokens + based on + [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second]. + + Cloud Tasks will pick the value of ``max_burst_size`` based + on the value of + [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second]. + + For queues that were created or updated using + ``queue.yaml/xml``, ``max_burst_size`` is equal to + `bucket_size `__. + Since ``max_burst_size`` is output only, if + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] + is called on a queue created by ``queue.yaml/xml``, + ``max_burst_size`` will be reset based on the value of + [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second], + regardless of whether + [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second] + is updated. + max_concurrent_dispatches (int): + The maximum number of concurrent tasks that Cloud Tasks + allows to be dispatched for this queue. After this threshold + has been reached, Cloud Tasks stops dispatching tasks until + the number of concurrent requests decreases. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + The maximum allowed value is 5,000. + + This field has the same meaning as `max_concurrent_requests + in + queue.yaml/xml `__. + """ + + max_dispatches_per_second = proto.Field(proto.DOUBLE, number=1) + + max_burst_size = proto.Field(proto.INT32, number=2) + + max_concurrent_dispatches = proto.Field(proto.INT32, number=3) + + +class RetryConfig(proto.Message): + r"""Retry config. + These settings determine when a failed task attempt is retried. + + Attributes: + max_attempts (int): + Number of attempts per task. + + Cloud Tasks will attempt the task ``max_attempts`` times + (that is, if the first attempt fails, then there will be + ``max_attempts - 1`` retries). Must be >= -1. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + -1 indicates unlimited attempts. + + This field has the same meaning as `task_retry_limit in + queue.yaml/xml `__. + max_retry_duration (~.duration.Duration): + If positive, ``max_retry_duration`` specifies the time limit + for retrying a failed task, measured from when the task was + first attempted. Once ``max_retry_duration`` time has passed + *and* the task has been attempted + [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] + times, no further attempts will be made and the task will be + deleted. + + If zero, then the task age is unlimited. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``max_retry_duration`` will be truncated to the nearest + second. + + This field has the same meaning as `task_age_limit in + queue.yaml/xml `__. + min_backoff (~.duration.Duration): + A task will be + [scheduled][google.cloud.tasks.v2.Task.schedule_time] for + retry between + [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies + that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``min_backoff`` will be truncated to the nearest second. 
+ + This field has the same meaning as `min_backoff_seconds in + queue.yaml/xml `__. + max_backoff (~.duration.Duration): + A task will be + [scheduled][google.cloud.tasks.v2.Task.schedule_time] for + retry between + [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies + that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``max_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `max_backoff_seconds in + queue.yaml/xml `__. + max_doublings (int): + The time between retries will double ``max_doublings`` + times. + + A task's retry interval starts at + [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff], + then doubles ``max_doublings`` times, then increases + linearly, and finally retries retries at intervals of + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + up to + [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] + times. + + For example, if + [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] + is 10s, + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + is 300s, and ``max_doublings`` is 3, then the a task will + first be retried in 10s. The retry interval will double + three times, and then increase linearly by 2^3 \* 10s. + Finally, the task will retry at intervals of + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + until the task has been attempted + [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] + times. Thus, the requests will retry at 10s, 20s, 40s, 80s, + 160s, 240s, 300s, 300s, .... + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + This field has the same meaning as `max_doublings in + queue.yaml/xml `__. + """ + + max_attempts = proto.Field(proto.INT32, number=1) + + max_retry_duration = proto.Field( + proto.MESSAGE, number=2, message=duration.Duration, + ) + + min_backoff = proto.Field(proto.MESSAGE, number=3, message=duration.Duration,) + + max_backoff = proto.Field(proto.MESSAGE, number=4, message=duration.Duration,) + + max_doublings = proto.Field(proto.INT32, number=5) + + +class StackdriverLoggingConfig(proto.Message): + r"""Configuration options for writing logs to `Stackdriver + Logging `__. + + Attributes: + sampling_ratio (float): + Specifies the fraction of operations to write to + `Stackdriver + Logging `__. This + field may contain any value between 0.0 and 1.0, inclusive. + 0.0 is the default and means that no operations are logged. + """ + + sampling_ratio = proto.Field(proto.DOUBLE, number=1) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/tasks_v2/types/target.py b/google/cloud/tasks_v2/types/target.py new file mode 100644 index 00000000..5b71f458 --- /dev/null +++ b/google/cloud/tasks_v2/types/target.py @@ -0,0 +1,504 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
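# Illustrative sketch (not part of this patch) of the Queue, RateLimits, and
# RetryConfig messages defined above. With min_backoff=10s, max_backoff=300s, and
# max_doublings=3, retries run at roughly 10s, 20s, 40s, 80s, 160s, 240s, then 300s
# thereafter, matching the RetryConfig documentation. Names below are placeholders.
from google.cloud import tasks_v2
from google.cloud.tasks_v2.types import Queue, RateLimits, RetryConfig
from google.protobuf import duration_pb2

client = tasks_v2.CloudTasksClient()
queue = Queue(
    name="projects/my-project/locations/us-central1/queues/my-queue",
    rate_limits=RateLimits(
        max_dispatches_per_second=10,
        max_concurrent_dispatches=5,
    ),
    retry_config=RetryConfig(
        max_attempts=8,
        min_backoff=duration_pb2.Duration(seconds=10),
        max_backoff=duration_pb2.Duration(seconds=300),
        max_doublings=3,
    ),
)
created = client.create_queue(
    parent="projects/my-project/locations/us-central1", queue=queue
)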
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2", + manifest={ + "HttpMethod", + "HttpRequest", + "AppEngineHttpRequest", + "AppEngineRouting", + "OAuthToken", + "OidcToken", + }, +) + + +class HttpMethod(proto.Enum): + r"""The HTTP method used to deliver the task.""" + HTTP_METHOD_UNSPECIFIED = 0 + POST = 1 + GET = 2 + HEAD = 3 + PUT = 4 + DELETE = 5 + PATCH = 6 + OPTIONS = 7 + + +class HttpRequest(proto.Message): + r"""HTTP request. + + The task will be pushed to the worker as an HTTP request. If the + worker or the redirected worker acknowledges the task by returning a + successful HTTP response code ([``200`` - ``299``]), the task will + be removed from the queue. If any other HTTP response code is + returned or no response is received, the task will be retried + according to the following: + + - User-specified throttling: [retry + configuration][google.cloud.tasks.v2.Queue.retry_config], [rate + limits][google.cloud.tasks.v2.Queue.rate_limits], and the + [queue's state][google.cloud.tasks.v2.Queue.state]. + + - System throttling: To prevent the worker from overloading, Cloud + Tasks may temporarily reduce the queue's effective rate. + User-specified settings will not be changed. + + System throttling happens because: + + - Cloud Tasks backs off on all errors. Normally the backoff + specified in [rate + limits][google.cloud.tasks.v2.Queue.rate_limits] will be used. + But if the worker returns ``429`` (Too Many Requests), ``503`` + (Service Unavailable), or the rate of errors is high, Cloud Tasks + will use a higher backoff rate. The retry specified in the + ``Retry-After`` HTTP response header is considered. + + - To prevent traffic spikes and to smooth sudden increases in + traffic, dispatches ramp up slowly when the queue is newly + created or idle and if large numbers of tasks suddenly become + available to dispatch (due to spikes in create task rates, the + queue being unpaused, or many tasks that are scheduled at the + same time). + + Attributes: + url (str): + Required. The full url path that the request will be sent + to. + + This string must begin with either "http://" or "https://". + Some examples are: ``http://acme.com`` and + ``https://acme.com/sales:8080``. Cloud Tasks will encode + some characters for safety and compatibility. The maximum + allowed URL length is 2083 characters after encoding. + + The ``Location`` header response from a redirect response + [``300`` - ``399``] may be followed. The redirect is not + counted as a separate attempt. + http_method (~.target.HttpMethod): + The HTTP method to use for the request. The + default is POST. + headers (Sequence[~.target.HttpRequest.HeadersEntry]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + + These headers represent a subset of the headers that will + accompany the task's HTTP request. Some HTTP request headers + will be ignored or replaced. + + A partial list of headers that will be ignored or replaced + is: + + - Host: This will be computed by Cloud Tasks and derived + from + [HttpRequest.url][google.cloud.tasks.v2.HttpRequest.url]. + - Content-Length: This will be computed by Cloud Tasks. + - User-Agent: This will be set to ``"Google-Cloud-Tasks"``. + - X-Google-\*: Google use only. + - X-AppEngine-\*: Google use only. 
+ + ``Content-Type`` won't be set by Cloud Tasks. You can + explicitly set ``Content-Type`` to a media type when the + [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/octet-stream"`` or ``"application/json"``. + + Headers which can have multiple values (according to + RFC2616) can be specified using comma-separated values. + + The size of the headers must be less than 80KB. + body (bytes): + HTTP request body. + + A request body is allowed only if the [HTTP + method][google.cloud.tasks.v2.HttpRequest.http_method] is + POST, PUT, or PATCH. It is an error to set body on a task + with an incompatible + [HttpMethod][google.cloud.tasks.v2.HttpMethod]. + oauth_token (~.target.OAuthToken): + If specified, an `OAuth + token `__ + will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization should generally only be used + when calling Google APIs hosted on \*.googleapis.com. + oidc_token (~.target.OidcToken): + If specified, an + `OIDC `__ + token will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend + to validate the token yourself. + """ + + url = proto.Field(proto.STRING, number=1) + + http_method = proto.Field(proto.ENUM, number=2, enum="HttpMethod",) + + headers = proto.MapField(proto.STRING, proto.STRING, number=3) + + body = proto.Field(proto.BYTES, number=4) + + oauth_token = proto.Field( + proto.MESSAGE, number=5, oneof="authorization_header", message="OAuthToken", + ) + + oidc_token = proto.Field( + proto.MESSAGE, number=6, oneof="authorization_header", message="OidcToken", + ) + + +class AppEngineHttpRequest(proto.Message): + r"""App Engine HTTP request. + + The message defines the HTTP request that is sent to an App Engine + app when the task is dispatched. + + Using + [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + The task will be delivered to the App Engine app which belongs to + the same project as the queue. For more information, see `How + Requests are + Routed `__ + and how routing is affected by `dispatch + files `__. + Traffic is encrypted during transport and never leaves Google + datacenters. Because this traffic is carried over a communication + mechanism internal to Google, you cannot explicitly set the protocol + (for example, HTTP or HTTPS). The request to the handler, however, + will appear to have used the HTTP protocol. + + The [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting] used + to construct the URL that the task is delivered to can be set at the + queue-level or task-level: + + - If [app_engine_routing_override is set on the + queue][Queue.app_engine_routing_override], this value is used for + all tasks in the queue, no matter what the setting is for the + [task-level + app_engine_routing][AppEngineHttpRequest.app_engine_routing]. + + The ``url`` that the task will be sent to is: + + - ``url =`` [host][google.cloud.tasks.v2.AppEngineRouting.host] + ``+`` + [relative_uri][google.cloud.tasks.v2.AppEngineHttpRequest.relative_uri] + + Tasks can be dispatched to secure app handlers, unsecure app + handlers, and URIs restricted with + ```login: admin`` `__. 
+ Because tasks are not run as any user, they cannot be dispatched to + URIs restricted with + ```login: required`` `__ + Task dispatches also do not follow redirects. + + The task attempt has succeeded if the app's request handler returns + an HTTP response code in the range [``200`` - ``299``]. The task + attempt has failed if the app's handler returns a non-2xx response + code or Cloud Tasks does not receive response before the + [deadline][google.cloud.tasks.v2.Task.dispatch_deadline]. Failed + tasks will be retried according to the [retry + configuration][google.cloud.tasks.v2.Queue.retry_config]. ``503`` + (Service Unavailable) is considered an App Engine system error + instead of an application error and will cause Cloud Tasks' traffic + congestion control to temporarily throttle the queue's dispatches. + Unlike other types of task targets, a ``429`` (Too Many Requests) + response from an app handler does not cause traffic congestion + control to throttle the queue. + + Attributes: + http_method (~.target.HttpMethod): + The HTTP method to use for the request. The default is POST. + + The app's request handler for the task's target URL must be + able to handle HTTP requests with this http_method, + otherwise the task attempt will fail with error code 405 + (Method Not Allowed). See `Writing a push task request + handler `__ + and the documentation for the request handlers in the + language your app is written in e.g. `Python Request + Handler `__. + app_engine_routing (~.target.AppEngineRouting): + Task-level setting for App Engine routing. + + - If [app_engine_routing_override is set on the + queue][Queue.app_engine_routing_override], this value is + used for all tasks in the queue, no matter what the + setting is for the [task-level + app_engine_routing][AppEngineHttpRequest.app_engine_routing]. + relative_uri (str): + The relative URI. + The relative URI must begin with "/" and must be + a valid HTTP relative URI. It can contain a path + and query string arguments. If the relative URI + is empty, then the root path "/" will be used. + No spaces are allowed, and the maximum length + allowed is 2083 characters. + headers (Sequence[~.target.AppEngineHttpRequest.HeadersEntry]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2.CloudTasks.CreateTask]. + Repeated headers are not supported but a header value can + contain commas. + + Cloud Tasks sets some headers to default values: + + - ``User-Agent``: By default, this header is + ``"AppEngine-Google; (+http://code.google.com/appengine)"``. + This header can be modified, but Cloud Tasks will append + ``"AppEngine-Google; (+http://code.google.com/appengine)"`` + to the modified ``User-Agent``. + + If the task has a + [body][google.cloud.tasks.v2.AppEngineHttpRequest.body], + Cloud Tasks sets the following headers: + + - ``Content-Type``: By default, the ``Content-Type`` header + is set to ``"application/octet-stream"``. The default can + be overridden by explicitly setting ``Content-Type`` to a + particular media type when the [task is + created][google.cloud.tasks.v2.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/json"``. + - ``Content-Length``: This is computed by Cloud Tasks. This + value is output only. It cannot be changed. 
+ + The headers below cannot be set or overridden: + + - ``Host`` + - ``X-Google-\*`` + - ``X-AppEngine-\*`` + + In addition, Cloud Tasks sets some headers when the task is + dispatched, such as headers containing information about the + task; see `request + headers `__. + These headers are set only when the task is dispatched, so + they are not visible when the task is returned in a Cloud + Tasks response. + + Although there is no specific limit for the maximum number + of headers or the size, there is a limit on the maximum size + of the [Task][google.cloud.tasks.v2.Task]. For more + information, see the + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] + documentation. + body (bytes): + HTTP request body. + + A request body is allowed only if the HTTP method is POST or + PUT. It is an error to set a body on a task with an + incompatible [HttpMethod][google.cloud.tasks.v2.HttpMethod]. + """ + + http_method = proto.Field(proto.ENUM, number=1, enum="HttpMethod",) + + app_engine_routing = proto.Field( + proto.MESSAGE, number=2, message="AppEngineRouting", + ) + + relative_uri = proto.Field(proto.STRING, number=3) + + headers = proto.MapField(proto.STRING, proto.STRING, number=4) + + body = proto.Field(proto.BYTES, number=5) + + +class AppEngineRouting(proto.Message): + r"""App Engine Routing. + + Defines routing characteristics specific to App Engine - service, + version, and instance. + + For more information about services, versions, and instances see `An + Overview of App + Engine `__, + `Microservices Architecture on Google App + Engine `__, + `App Engine Standard request + routing `__, + and `App Engine Flex request + routing `__. + + Using [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + Attributes: + service (str): + App service. + + By default, the task is sent to the service which is the + default service when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2.AppEngineRouting.host] is not + parsable into + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2.AppEngineRouting.host] is not + parsable, then + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance] + are the empty string. + version (str): + App version. + + By default, the task is sent to the version which is the + default version when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2.AppEngineRouting.host] is not + parsable into + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. 
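Correspondingly, a sketch of an ``AppEngineHttpRequest`` with an explicit task-level ``AppEngineRouting`` built from the fields just declared; the service name and relative URI are placeholders, and the same top-level re-export assumption applies.

from google.cloud import tasks_v2

app_engine_request = tasks_v2.AppEngineHttpRequest(
    http_method=tasks_v2.HttpMethod.POST,
    relative_uri="/tasks/handle",                     # must start with "/"; placeholder handler path
    headers={"Content-Type": "application/json"},
    body=b'{"message": "hello"}',                     # bodies are limited to POST and PUT here
    app_engine_routing=tasks_v2.AppEngineRouting(
        service="worker",                             # placeholder service; version and instance are optional
    ),
)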
If + [host][google.cloud.tasks.v2.AppEngineRouting.host] is not + parsable, then + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance] + are the empty string. + instance (str): + App instance. + + By default, the task is sent to an instance which is + available when the task is attempted. + + Requests can only be sent to a specific instance if `manual + scaling is used in App Engine + Standard `__. + App Engine Flex does not support instances. For more + information, see `App Engine Standard request + routing `__ + and `App Engine Flex request + routing `__. + host (str): + Output only. The host that the task is sent to. + + The host is constructed from the domain name of the app + associated with the queue's project ID (for example + .appspot.com), and the + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. + Tasks which were created using the App Engine SDK might have + a custom domain name. + + For more information, see `How Requests are + Routed `__. + """ + + service = proto.Field(proto.STRING, number=1) + + version = proto.Field(proto.STRING, number=2) + + instance = proto.Field(proto.STRING, number=3) + + host = proto.Field(proto.STRING, number=4) + + +class OAuthToken(proto.Message): + r"""Contains information needed for generating an `OAuth + token `__. + This type of authorization should generally only be used when + calling Google APIs hosted on \*.googleapis.com. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OAuth token. The service account + must be within the same project as the queue. The caller + must have iam.serviceAccounts.actAs permission for the + service account. + scope (str): + OAuth scope to be used for generating OAuth + access token. If not specified, + "https://www.googleapis.com/auth/cloud-platform" + will be used. + """ + + service_account_email = proto.Field(proto.STRING, number=1) + + scope = proto.Field(proto.STRING, number=2) + + +class OidcToken(proto.Message): + r"""Contains information needed for generating an `OpenID Connect + token `__. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the + token yourself. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OIDC token. The service account + must be within the same project as the queue. The caller + must have iam.serviceAccounts.actAs permission for the + service account. + audience (str): + Audience to be used when generating OIDC + token. If not specified, the URI specified in + target will be used. + """ + + service_account_email = proto.Field(proto.STRING, number=1) + + audience = proto.Field(proto.STRING, number=2) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/tasks_v2/types/task.py b/google/cloud/tasks_v2/types/task.py new file mode 100644 index 00000000..fe676567 --- /dev/null +++ b/google/cloud/tasks_v2/types/task.py @@ -0,0 +1,225 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
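The ``OAuthToken`` and ``OidcToken`` messages above populate the ``authorization_header`` oneof on ``HttpRequest``; a sketch under the same re-export assumption, with placeholder service-account emails and URLs.

from google.cloud import tasks_v2

# OIDC is the usual choice for non-Google endpoints such as Cloud Run.
oidc_request = tasks_v2.HttpRequest(
    url="https://my-service-abc123-uc.a.run.app/handler",   # placeholder Cloud Run URL
    oidc_token=tasks_v2.OidcToken(
        service_account_email="tasks-invoker@my-project.iam.gserviceaccount.com",
        # audience falls back to the request URL when left unset
    ),
)

# OAuth access tokens are generally only for *.googleapis.com APIs.
oauth_request = tasks_v2.HttpRequest(
    url="https://storage.googleapis.com/storage/v1/b",      # placeholder Google API endpoint
    oauth_token=tasks_v2.OAuthToken(
        service_account_email="tasks-invoker@my-project.iam.gserviceaccount.com",
        scope="https://www.googleapis.com/auth/cloud-platform",
    ),
)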
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.tasks_v2.types import target +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2", manifest={"Task", "Attempt",}, +) + + +class Task(proto.Message): + r"""A unit of scheduled work. + + Attributes: + name (str): + Optionally caller-specified in + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + + The task name. + + The task name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the task's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + - ``TASK_ID`` can contain only letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), or underscores (_). The maximum + length is 500 characters. + app_engine_http_request (~.target.AppEngineHttpRequest): + HTTP request that is sent to the App Engine app handler. + + An App Engine task is a task that has + [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest] + set. + http_request (~.target.HttpRequest): + HTTP request that is sent to the worker. + + An HTTP task is a task that has + [HttpRequest][google.cloud.tasks.v2.HttpRequest] set. + schedule_time (~.timestamp.Timestamp): + The time when the task is scheduled to be attempted or + retried. + + ``schedule_time`` will be truncated to the nearest + microsecond. + create_time (~.timestamp.Timestamp): + Output only. The time that the task was created. + + ``create_time`` will be truncated to the nearest second. + dispatch_deadline (~.duration.Duration): + The deadline for requests sent to the worker. If the worker + does not respond by this deadline then the request is + cancelled and the attempt is marked as a + ``DEADLINE_EXCEEDED`` failure. Cloud Tasks will retry the + task according to the + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + Note that when the request is cancelled, Cloud Tasks will + stop listing for the response, but whether the worker stops + processing depends on the worker. For example, if the worker + is stuck, it may not react to cancelled requests. + + The default and maximum values depend on the type of + request: + + - For [HTTP tasks][google.cloud.tasks.v2.HttpRequest], the + default is 10 minutes. The deadline must be in the + interval [15 seconds, 30 minutes]. + + - For [App Engine + tasks][google.cloud.tasks.v2.AppEngineHttpRequest], 0 + indicates that the request has the default deadline. 
The + default deadline depends on the `scaling + type `__ + of the service: 10 minutes for standard apps with + automatic scaling, 24 hours for standard apps with manual + and basic scaling, and 60 minutes for flex apps. If the + request deadline is set, it must be in the interval [15 + seconds, 24 hours 15 seconds]. Regardless of the task's + ``dispatch_deadline``, the app handler will not run for + longer than than the service's timeout. We recommend + setting the ``dispatch_deadline`` to at most a few + seconds more than the app handler's timeout. For more + information see + `Timeouts `__. + + ``dispatch_deadline`` will be truncated to the nearest + millisecond. The deadline is an approximate deadline. + dispatch_count (int): + Output only. The number of attempts + dispatched. + This count includes attempts which have been + dispatched but haven't received a response. + response_count (int): + Output only. The number of attempts which + have received a response. + first_attempt (~.task.Attempt): + Output only. The status of the task's first attempt. + + Only + [dispatch_time][google.cloud.tasks.v2.Attempt.dispatch_time] + will be set. The other + [Attempt][google.cloud.tasks.v2.Attempt] information is not + retained by Cloud Tasks. + last_attempt (~.task.Attempt): + Output only. The status of the task's last + attempt. + view (~.task.Task.View): + Output only. The view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] has been returned. + """ + + class View(proto.Enum): + r"""The view specifies a subset of [Task][google.cloud.tasks.v2.Task] + data. + + When a task is returned in a response, not all information is + retrieved by default because some data, such as payloads, might be + desirable to return only when needed because of its large size or + because of the sensitivity of data that it contains. + """ + VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + name = proto.Field(proto.STRING, number=1) + + app_engine_http_request = proto.Field( + proto.MESSAGE, + number=2, + oneof="message_type", + message=target.AppEngineHttpRequest, + ) + + http_request = proto.Field( + proto.MESSAGE, number=3, oneof="message_type", message=target.HttpRequest, + ) + + schedule_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + + dispatch_deadline = proto.Field(proto.MESSAGE, number=6, message=duration.Duration,) + + dispatch_count = proto.Field(proto.INT32, number=7) + + response_count = proto.Field(proto.INT32, number=8) + + first_attempt = proto.Field(proto.MESSAGE, number=9, message="Attempt",) + + last_attempt = proto.Field(proto.MESSAGE, number=10, message="Attempt",) + + view = proto.Field(proto.ENUM, number=11, enum=View,) + + +class Attempt(proto.Message): + r"""The status of a task attempt. + + Attributes: + schedule_time (~.timestamp.Timestamp): + Output only. The time that this attempt was scheduled. + + ``schedule_time`` will be truncated to the nearest + microsecond. + dispatch_time (~.timestamp.Timestamp): + Output only. The time that this attempt was dispatched. + + ``dispatch_time`` will be truncated to the nearest + microsecond. + response_time (~.timestamp.Timestamp): + Output only. The time that this attempt response was + received. + + ``response_time`` will be truncated to the nearest + microsecond. + response_status (~.status.Status): + Output only. The response from the worker for this attempt. 
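A sketch of assembling a ``Task`` from the fields listed above: ``http_request`` fills the ``message_type`` oneof, and ``schedule_time``/``dispatch_deadline`` take the well-known ``Timestamp`` and ``Duration`` types imported at the top of this module. The URL and timing values are placeholders.

import datetime

from google.cloud import tasks_v2
from google.protobuf import duration_pb2, timestamp_pb2

# Run the task roughly 60 seconds from now (placeholder delay).
schedule_time = timestamp_pb2.Timestamp()
schedule_time.FromDatetime(datetime.datetime.utcnow() + datetime.timedelta(seconds=60))

task = tasks_v2.Task(
    # `name` is omitted so Cloud Tasks assigns the task ID.
    http_request=tasks_v2.HttpRequest(
        url="https://example.com/task_handler",
        http_method=tasks_v2.HttpMethod.POST,
        body=b'{"message": "hello"}',
    ),
    schedule_time=schedule_time,
    dispatch_deadline=duration_pb2.Duration(seconds=600),   # 10 minutes, inside the [15s, 30m] window
)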
+ + If ``response_time`` is unset, then the task has not been + attempted or is currently running and the + ``response_status`` field is meaningless. + """ + + schedule_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + dispatch_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + response_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + + response_status = proto.Field(proto.MESSAGE, number=4, message=status.Status,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/tasks_v2beta2/__init__.py b/google/cloud/tasks_v2beta2/__init__.py index d6aba004..532b3358 100644 --- a/google/cloud/tasks_v2beta2/__init__.py +++ b/google/cloud/tasks_v2beta2/__init__.py @@ -1,45 +1,87 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# - -from __future__ import absolute_import -import sys -import warnings - -from google.cloud.tasks_v2beta2 import types -from google.cloud.tasks_v2beta2.gapic import cloud_tasks_client -from google.cloud.tasks_v2beta2.gapic import enums - - -if sys.version_info[:2] == (2, 7): - message = ( - "A future version of this library will drop support for Python 2.7. 
" - "More details about Python 2 support for Google Cloud Client Libraries " - "can be found at https://cloud.google.com/python/docs/python2-sunset/" - ) - warnings.warn(message, DeprecationWarning) - - -class CloudTasksClient(cloud_tasks_client.CloudTasksClient): - __doc__ = cloud_tasks_client.CloudTasksClient.__doc__ - enums = enums +from .services.cloud_tasks import CloudTasksClient +from .types.cloudtasks import AcknowledgeTaskRequest +from .types.cloudtasks import CancelLeaseRequest +from .types.cloudtasks import CreateQueueRequest +from .types.cloudtasks import CreateTaskRequest +from .types.cloudtasks import DeleteQueueRequest +from .types.cloudtasks import DeleteTaskRequest +from .types.cloudtasks import GetQueueRequest +from .types.cloudtasks import GetTaskRequest +from .types.cloudtasks import LeaseTasksRequest +from .types.cloudtasks import LeaseTasksResponse +from .types.cloudtasks import ListQueuesRequest +from .types.cloudtasks import ListQueuesResponse +from .types.cloudtasks import ListTasksRequest +from .types.cloudtasks import ListTasksResponse +from .types.cloudtasks import PauseQueueRequest +from .types.cloudtasks import PurgeQueueRequest +from .types.cloudtasks import RenewLeaseRequest +from .types.cloudtasks import ResumeQueueRequest +from .types.cloudtasks import RunTaskRequest +from .types.cloudtasks import UpdateQueueRequest +from .types.queue import Queue +from .types.queue import RateLimits +from .types.queue import RetryConfig +from .types.target import AppEngineHttpRequest +from .types.target import AppEngineHttpTarget +from .types.target import AppEngineRouting +from .types.target import HttpMethod +from .types.target import PullMessage +from .types.target import PullTarget +from .types.task import AttemptStatus +from .types.task import Task +from .types.task import TaskStatus __all__ = ( - "enums", - "types", + "AcknowledgeTaskRequest", + "AppEngineHttpRequest", + "AppEngineHttpTarget", + "AppEngineRouting", + "AttemptStatus", + "CancelLeaseRequest", + "CreateQueueRequest", + "CreateTaskRequest", + "DeleteQueueRequest", + "DeleteTaskRequest", + "GetQueueRequest", + "GetTaskRequest", + "HttpMethod", + "LeaseTasksRequest", + "LeaseTasksResponse", + "ListQueuesRequest", + "ListQueuesResponse", + "ListTasksRequest", + "ListTasksResponse", + "PauseQueueRequest", + "PullMessage", + "PullTarget", + "PurgeQueueRequest", + "Queue", + "RateLimits", + "RenewLeaseRequest", + "ResumeQueueRequest", + "RetryConfig", + "RunTaskRequest", + "Task", + "TaskStatus", + "UpdateQueueRequest", "CloudTasksClient", ) diff --git a/google/cloud/tasks_v2beta2/gapic/__init__.py b/google/cloud/tasks_v2beta2/gapic/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py b/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py deleted file mode 100644 index 84279e1a..00000000 --- a/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py +++ /dev/null @@ -1,2147 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.cloud.tasks.v2beta2 CloudTasks API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.tasks_v2beta2.gapic import cloud_tasks_client_config -from google.cloud.tasks_v2beta2.gapic import enums -from google.cloud.tasks_v2beta2.gapic.transports import cloud_tasks_grpc_transport -from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2 -from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2_grpc -from google.cloud.tasks_v2beta2.proto import queue_pb2 -from google.cloud.tasks_v2beta2.proto import task_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import options_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-tasks",).version - - -class CloudTasksClient(object): - """ - Cloud Tasks allows developers to manage the execution of background - work in their applications. - """ - - SERVICE_ADDRESS = "cloudtasks.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.tasks.v2beta2.CloudTasks" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def location_path(cls, project, location): - """Return a fully-qualified location string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}", - project=project, - location=location, - ) - - @classmethod - def queue_path(cls, project, location, queue): - """Return a fully-qualified queue string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/queues/{queue}", - project=project, - location=location, - queue=queue, - ) - - @classmethod - def task_path(cls, project, location, queue, task): - """Return a fully-qualified task string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}", - project=project, - location=location, - queue=queue, - task=task, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. 
- - Args: - transport (Union[~.CloudTasksGrpcTransport, - Callable[[~.Credentials, type], ~.CloudTasksGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = cloud_tasks_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=cloud_tasks_grpc_transport.CloudTasksGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = cloud_tasks_grpc_transport.CloudTasksGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) 
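A sketch of constructing the replacement client, assuming it keeps the ``credentials``, ``transport``, and ``client_options`` arguments and the ``from_service_account_file`` helper while dropping the deprecated ``channel`` and ``client_config`` arguments shown above; the key path is a placeholder.

from google.api_core.client_options import ClientOptions
from google.cloud import tasks_v2beta2

# Default construction picks up Application Default Credentials.
client = tasks_v2beta2.CloudTasksClient()

# Service-account key file, mirroring the deleted from_service_account_file helper.
client = tasks_v2beta2.CloudTasksClient.from_service_account_file("/path/to/key.json")

# Endpoint overrides go through client_options, as before.
client = tasks_v2beta2.CloudTasksClient(
    client_options=ClientOptions(api_endpoint="cloudtasks.googleapis.com"),
)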
- self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def list_queues( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists queues. - - Queues are returned in lexicographical order. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> parent = client.location_path('[PROJECT]', '[LOCATION]') - >>> - >>> # Iterate over all results - >>> for element in client.list_queues(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_queues(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - filter_ (str): ``filter`` can be used to specify a subset of queues. Any ``Queue`` - field can be used as a filter and several operators as supported. For - example: ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as - described in `Stackdriver's Advanced Logs - Filters `__. - - Sample filter "app_engine_http_target: \*". - - Note that using filters might cause fewer queues than the requested_page - size to be returned. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.tasks_v2beta2.types.Queue` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
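Against the new surface, the ``list_queues`` example above becomes keyword-only: a sketch assuming ``parent`` stays flattened, that a request mapping is accepted (which is also where ``filter`` now lives), and that the returned pager keeps a ``pages`` property. The names and the filter string are placeholders.

from google.cloud import tasks_v2beta2

client = tasks_v2beta2.CloudTasksClient()
parent = "projects/my-project/locations/us-central1"   # placeholder

# Iterate over every queue.
for queue in client.list_queues(parent=parent):
    print(queue.name)

# Iterate one page at a time; the filter rides along on the request message.
pager = client.list_queues(request={"parent": parent, "filter": "state: PAUSED"})
for page in pager.pages:
    for queue in page.queues:
        print(queue.name)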
- if "list_queues" not in self._inner_api_calls: - self._inner_api_calls[ - "list_queues" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_queues, - default_retry=self._method_configs["ListQueues"].retry, - default_timeout=self._method_configs["ListQueues"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.ListQueuesRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_queues"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="queues", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a queue. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> response = client.get_queue(name) - - Args: - name (str): Required. The resource name of the queue. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "get_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_queue, - default_retry=self._method_configs["GetQueue"].retry, - default_timeout=self._method_configs["GetQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.GetQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_queue( - self, - parent, - queue, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a queue. 
- - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless - of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> parent = client.location_path('[PROJECT]', '[LOCATION]') - >>> - >>> # TODO: Initialize `queue`: - >>> queue = {} - >>> - >>> response = client.create_queue(parent, queue) - - Args: - parent (str): Required. The location name in which the queue will be created. For - example: ``projects/PROJECT_ID/locations/LOCATION_ID`` - - The list of allowed locations can be obtained by calling Cloud Tasks' - implementation of ``ListLocations``. - queue (Union[dict, ~google.cloud.tasks_v2beta2.types.Queue]): Required. The queue to create. - - ``Queue's name`` cannot be the same as an existing queue. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta2.types.Queue` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "create_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_queue, - default_retry=self._method_configs["CreateQueue"].retry, - default_timeout=self._method_configs["CreateQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.CreateQueueRequest(parent=parent, queue=queue,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_queue( - self, - queue, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a queue. - - This method creates the queue if it does not exist and updates the queue - if it does exist. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless - of whether it was dispatched or not. 
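The ``get_queue``/``create_queue`` pair above maps onto keyword arguments on the new client, assuming ``parent``, ``queue``, and ``name`` remain flattened and the ``queue_path`` helper is still provided; the queue is passed as a proto-plus ``Queue`` message, and the project, location, and queue ID are placeholders.

from google.cloud import tasks_v2beta2

client = tasks_v2beta2.CloudTasksClient()
parent = "projects/my-project/locations/us-central1"                     # placeholder
queue_name = client.queue_path("my-project", "us-central1", "my-queue")

created = client.create_queue(
    parent=parent,
    queue=tasks_v2beta2.Queue(name=queue_name),
)

# Reads use the fully qualified queue name.
queue = client.get_queue(name=queue_name)
print(queue.state)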
- - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> # TODO: Initialize `queue`: - >>> queue = {} - >>> - >>> response = client.update_queue(queue) - - Args: - queue (Union[dict, ~google.cloud.tasks_v2beta2.types.Queue]): Required. The queue to create or update. - - The queue's ``name`` must be specified. - - Output only fields cannot be modified using UpdateQueue. Any value - specified for an output only field will be ignored. The queue's ``name`` - cannot be changed. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta2.types.Queue` - update_mask (Union[dict, ~google.cloud.tasks_v2beta2.types.FieldMask]): A mask used to specify which fields of the queue are being updated. - - If empty, then all fields will be updated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta2.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "update_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_queue, - default_retry=self._method_configs["UpdateQueue"].retry, - default_timeout=self._method_configs["UpdateQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.UpdateQueueRequest( - queue=queue, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("queue.name", queue.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be created - for 7 days. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. 
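An ``update_queue`` sketch against the new surface, assuming ``queue`` and ``update_mask`` remain flattened keywords; the ``FieldMask`` still comes from ``google.protobuf.field_mask_pb2`` as in the deleted imports above, the queue name is a placeholder, and ``retry_config.max_attempts`` is only one example of an updatable field.

from google.cloud import tasks_v2beta2
from google.protobuf import field_mask_pb2

client = tasks_v2beta2.CloudTasksClient()

queue = tasks_v2beta2.Queue(
    name="projects/my-project/locations/us-central1/queues/my-queue",   # placeholder
    retry_config=tasks_v2beta2.RetryConfig(max_attempts=5),
)
updated = client.update_queue(
    queue=queue,
    update_mask=field_mask_pb2.FieldMask(paths=["retry_config.max_attempts"]),
)
print(updated.retry_config.max_attempts)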
Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> client.delete_queue(name) - - Args: - name (str): Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_queue, - default_retry=self._method_configs["DeleteQueue"].retry, - default_timeout=self._method_configs["DeleteQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.DeleteQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def purge_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Purges a queue by deleting all of its tasks. - - All tasks created before this method is called are permanently deleted. - - Purge operations can take up to one minute to take effect. Tasks - might be dispatched before the purge takes effect. A purge is irreversible. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> response = client.purge_queue(name) - - Args: - name (str): Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "purge_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "purge_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.purge_queue, - default_retry=self._method_configs["PurgeQueue"].retry, - default_timeout=self._method_configs["PurgeQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.PurgeQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["purge_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def pause_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks until - the queue is resumed via ``ResumeQueue``. Tasks can still be added when - the queue is paused. A queue is paused if its ``state`` is ``PAUSED``. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> response = client.pause_queue(name) - - Args: - name (str): Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
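The queue lifecycle calls (``delete_queue`` and ``purge_queue`` above, ``pause_queue`` here, and the ``resume_queue`` that follows) all reduce to a single ``name`` keyword on the generated client, assuming that flattening carries over; the queue name is a placeholder.

from google.cloud import tasks_v2beta2

client = tasks_v2beta2.CloudTasksClient()
name = "projects/my-project/locations/us-central1/queues/my-queue"   # placeholder

client.pause_queue(name=name)    # dispatching stops; tasks can still be added
client.resume_queue(name=name)   # state returns to RUNNING
client.purge_queue(name=name)    # irreversibly deletes all existing tasks
client.delete_queue(name=name)   # the name stays unusable for about 7 days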
- if "pause_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "pause_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.pause_queue, - default_retry=self._method_configs["PauseQueue"].retry, - default_timeout=self._method_configs["PauseQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.PauseQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["pause_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def resume_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Resume a queue. - - This method resumes a queue after it has been ``PAUSED`` or - ``DISABLED``. The state of a queue is stored in the queue's ``state``; - after calling this method it will be set to ``RUNNING``. - - WARNING: Resuming many high-QPS queues at the same time can lead to - target overloading. If you are resuming high-QPS queues, follow the - 500/50/5 pattern described in `Managing Cloud Tasks Scaling - Risks `__. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> response = client.resume_queue(name) - - Args: - name (str): Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "resume_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "resume_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.resume_queue, - default_retry=self._method_configs["ResumeQueue"].retry, - default_timeout=self._method_configs["ResumeQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.ResumeQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["resume_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_iam_policy( - self, - resource, - options_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the access control policy for a ``Queue``. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the specified - resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> response = client.get_iam_policy(resource) - - Args: - resource (str): REQUIRED: The resource for which the policy is being requested. - See the operation documentation for the appropriate value for this field. - options_ (Union[dict, ~google.cloud.tasks_v2beta2.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to - ``GetIamPolicy``. This field is only used by Cloud IAM. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta2.types.GetPolicyOptions` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "get_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_iam_policy, - default_retry=self._method_configs["GetIamPolicy"].retry, - default_timeout=self._method_configs["GetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def set_iam_policy( - self, - resource, - policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Sets the access control policy for a ``Queue``. Replaces any - existing policy. - - Note: The Cloud Console does not check queue-level IAM permissions yet. - Project-level permissions are required to use the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the specified - resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `policy`: - >>> policy = {} - >>> - >>> response = client.set_iam_policy(resource, policy) - - Args: - resource (str): REQUIRED: The resource for which the policy is being specified. - See the operation documentation for the appropriate value for this field. - policy (Union[dict, ~google.cloud.tasks_v2beta2.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The - size of the policy is limited to a few 10s of KB. An empty policy is a - valid policy but certain Cloud Platform services (such as Projects) - might reject them. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta2.types.Policy` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "set_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "set_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.set_iam_policy, - default_retry=self._method_configs["SetIamPolicy"].retry, - default_timeout=self._method_configs["SetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["set_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def test_iam_permissions( - self, - resource, - permissions, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns permissions that a caller has on a ``Queue``. If the - resource does not exist, this will return an empty set of permissions, - not a ``NOT_FOUND`` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `permissions`: - >>> permissions = [] - >>> - >>> response = client.test_iam_permissions(resource, permissions) - - Args: - resource (str): REQUIRED: The resource for which the policy detail is being requested. - See the operation documentation for the appropriate value for this field. - permissions (list[str]): The set of permissions to check for the ``resource``. Permissions - with wildcards (such as '*' or 'storage.*') are not allowed. For more - information see `IAM - Overview `__. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.TestIamPermissionsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "test_iam_permissions" not in self._inner_api_calls: - self._inner_api_calls[ - "test_iam_permissions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.test_iam_permissions, - default_retry=self._method_configs["TestIamPermissions"].retry, - default_timeout=self._method_configs["TestIamPermissions"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["test_iam_permissions"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_tasks( - self, - parent, - response_view=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the tasks in a queue. - - By default, only the ``BASIC`` view is retrieved due to performance - considerations; ``response_view`` controls the subset of information - which is returned. - - The tasks may be returned in any order. The ordering may change at any - time. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> # Iterate over all results - >>> for element in client.list_tasks(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_tasks(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.tasks_v2beta2.types.Task` instances. 
- You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_tasks" not in self._inner_api_calls: - self._inner_api_calls[ - "list_tasks" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_tasks, - default_retry=self._method_configs["ListTasks"].retry, - default_timeout=self._method_configs["ListTasks"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.ListTasksRequest( - parent=parent, response_view=response_view, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_tasks"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="tasks", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_task( - self, - name, - response_view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a task. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') - >>> - >>> response = client.get_task(name) - - Args: - name (str): Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
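
The page iterator returned by ``list_tasks`` fetches further pages lazily, so a plain ``for`` loop is usually enough; ``page_size`` caps each underlying page and ``response_view`` controls which ``Task`` fields come back (project, location, and queue names are illustrative):

    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()
    parent = client.queue_path("my-project", "us-central1", "my-queue")

    # Iterate lazily over every task; only BASIC fields are returned here.
    for task in client.list_tasks(
        parent,
        response_view=tasks_v2beta2.enums.Task.View.BASIC,
        page_size=25,
    ):
        print(task.name, task.schedule_time)
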
- if "get_task" not in self._inner_api_calls: - self._inner_api_calls[ - "get_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_task, - default_retry=self._method_configs["GetTask"].retry, - default_timeout=self._method_configs["GetTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.GetTaskRequest(name=name, response_view=response_view,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_task( - self, - parent, - task, - response_view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask command. - - - For ``App Engine queues``, the maximum task size is 100KB. - - For ``pull queues``, the maximum task size is 1MB. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> # TODO: Initialize `task`: - >>> task = {} - >>> - >>> response = client.create_task(parent, task) - - Args: - parent (str): Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - The queue must already exist. - task (Union[dict, ~google.cloud.tasks_v2beta2.types.Task]): Required. The task to add. - - Task names have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. - The user can optionally specify a task ``name``. If a name is not - specified then the system will generate a random unique task id, which - will be set in the task returned in the ``response``. - - If ``schedule_time`` is not set or is in the past then Cloud Tasks will - set it to the current time. - - Task De-duplication: - - Explicitly specifying a task ID enables task de-duplication. If a task's - ID is identical to that of an existing task or a task that was deleted - or completed recently then the call will fail with ``ALREADY_EXISTS``. - If the task's queue was created using Cloud Tasks, then another task - with the same name can't be created for ~1hour after the original task - was deleted or completed. If the task's queue was created using - queue.yaml or queue.xml, then another task with the same name can't be - created for ~9days after the original task was deleted or completed. - - Because there is an extra lookup cost to identify duplicate task names, - these ``CreateTask`` calls have significantly increased latency. Using - hashed strings for the task id or for the prefix of the task id is - recommended. Choosing task ids that are sequential or have sequential - prefixes, for example using a timestamp, causes an increase in latency - and error rates in all task commands. The infrastructure relies on an - approximately uniform distribution of task ids to store and serve tasks - efficiently. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta2.types.Task` - response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_task" not in self._inner_api_calls: - self._inner_api_calls[ - "create_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_task, - default_retry=self._method_configs["CreateTask"].retry, - default_timeout=self._method_configs["CreateTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.CreateTaskRequest( - parent=parent, task=task, response_view=response_view, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_task( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a task. - - A task can be deleted if it is scheduled or dispatched. A task - cannot be deleted if it has completed successfully or permanently - failed. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') - >>> - >>> client.delete_task(name) - - Args: - name (str): Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_task" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_task, - default_retry=self._method_configs["DeleteTask"].retry, - default_timeout=self._method_configs["DeleteTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.DeleteTaskRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def lease_tasks( - self, - parent, - lease_duration, - max_tasks=None, - response_view=None, - filter_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Leases tasks from a pull queue for ``lease_duration``. - - This method is invoked by the worker to obtain a lease. The worker must - acknowledge the task via ``AcknowledgeTask`` after they have performed - the work associated with the task. - - The ``payload`` is intended to store data that the worker needs to - perform the work associated with the task. To return the payloads in the - ``response``, set ``response_view`` to ``FULL``. - - A maximum of 10 qps of ``LeaseTasks`` requests are allowed per queue. - ``RESOURCE_EXHAUSTED`` is returned when this limit is exceeded. - ``RESOURCE_EXHAUSTED`` is also returned when - ``max_tasks_dispatched_per_second`` is exceeded. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> # TODO: Initialize `lease_duration`: - >>> lease_duration = {} - >>> - >>> response = client.lease_tasks(parent, lease_duration) - - Args: - parent (str): Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - lease_duration (Union[dict, ~google.cloud.tasks_v2beta2.types.Duration]): Required. The duration of the lease. - - Each task returned in the ``response`` will have its ``schedule_time`` - set to the current time plus the ``lease_duration``. The task is leased - until its ``schedule_time``; thus, the task will not be returned to - another ``LeaseTasks`` call before its ``schedule_time``. - - After the worker has successfully finished the work associated with the - task, the worker must call via ``AcknowledgeTask`` before the - ``schedule_time``. Otherwise the task will be returned to a later - ``LeaseTasks`` call so that another worker can retry it. - - The maximum lease duration is 1 week. ``lease_duration`` will be - truncated to the nearest second. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta2.types.Duration` - max_tasks (int): The maximum number of tasks to lease. - - The system will make a best effort to return as close to as - ``max_tasks`` as possible. - - The largest that ``max_tasks`` can be is 1000. - - The maximum total size of a ``lease tasks response`` is 32 MB. If the - sum of all task sizes requested reaches this limit, fewer tasks than - requested are returned. - response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - filter_ (str): ``filter`` can be used to specify a subset of tasks to lease. - - When ``filter`` is set to ``tag=`` then the ``response`` will - contain only tasks whose ``tag`` is equal to ````. ```` - must be less than 500 characters. - - When ``filter`` is set to ``tag_function=oldest_tag()``, only tasks - which have the same tag as the task with the oldest ``schedule_time`` - will be returned. - - Grammar Syntax: - - - ``filter = "tag=" tag | "tag_function=" function`` - - - ``tag = string`` - - - ``function = "oldest_tag()"`` - - The ``oldest_tag()`` function returns tasks which have the same tag as - the oldest task (ordered by schedule time). - - SDK compatibility: Although the SDK allows tags to be either string or - `bytes `__, - only UTF-8 encoded tags can be used in Cloud Tasks. Tag which aren't - UTF-8 encoded can't be used in the ``filter`` and the task's ``tag`` - will be displayed as empty in Cloud Tasks. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.LeaseTasksResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
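
Put together, the lease/acknowledge cycle described above amounts to a worker loop like the following sketch (queue names are illustrative, ``do_work`` is a hypothetical handler, and the dict form of ``lease_duration`` mirrors a ``Duration`` message):

    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()
    parent = client.queue_path("my-project", "us-central1", "my-pull-queue")

    # Lease up to 10 tasks for 5 minutes; FULL view includes the payloads.
    response = client.lease_tasks(
        parent,
        {"seconds": 300},
        max_tasks=10,
        response_view=tasks_v2beta2.enums.Task.View.FULL,
    )
    for task in response.tasks:
        do_work(task.pull_message.payload)  # hypothetical worker function
        # Acknowledge before schedule_time, or the task becomes leasable again.
        client.acknowledge_task(task.name, task.schedule_time)
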
- if "lease_tasks" not in self._inner_api_calls: - self._inner_api_calls[ - "lease_tasks" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.lease_tasks, - default_retry=self._method_configs["LeaseTasks"].retry, - default_timeout=self._method_configs["LeaseTasks"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.LeaseTasksRequest( - parent=parent, - lease_duration=lease_duration, - max_tasks=max_tasks, - response_view=response_view, - filter=filter_, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["lease_tasks"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def acknowledge_task( - self, - name, - schedule_time, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Acknowledges a pull task. - - The worker, that is, the entity that ``leased`` this task must call this - method to indicate that the work associated with the task has finished. - - The worker must acknowledge a task within the ``lease_duration`` or the - lease will expire and the task will become available to be leased again. - After the task is acknowledged, it will not be returned by a later - ``LeaseTasks``, ``GetTask``, or ``ListTasks``. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') - >>> - >>> # TODO: Initialize `schedule_time`: - >>> schedule_time = {} - >>> - >>> client.acknowledge_task(name, schedule_time) - - Args: - name (str): Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - schedule_time (Union[dict, ~google.cloud.tasks_v2beta2.types.Timestamp]): Required. The task's current schedule time, available in the - ``schedule_time`` returned by ``LeaseTasks`` response or ``RenewLease`` - response. This restriction is to ensure that your worker currently holds - the lease. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta2.types.Timestamp` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "acknowledge_task" not in self._inner_api_calls: - self._inner_api_calls[ - "acknowledge_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.acknowledge_task, - default_retry=self._method_configs["AcknowledgeTask"].retry, - default_timeout=self._method_configs["AcknowledgeTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.AcknowledgeTaskRequest( - name=name, schedule_time=schedule_time, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["acknowledge_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def renew_lease( - self, - name, - schedule_time, - lease_duration, - response_view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Renew the current lease of a pull task. - - The worker can use this method to extend the lease by a new duration, - starting from now. The new task lease will be returned in the task's - ``schedule_time``. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') - >>> - >>> # TODO: Initialize `schedule_time`: - >>> schedule_time = {} - >>> - >>> # TODO: Initialize `lease_duration`: - >>> lease_duration = {} - >>> - >>> response = client.renew_lease(name, schedule_time, lease_duration) - - Args: - name (str): Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - schedule_time (Union[dict, ~google.cloud.tasks_v2beta2.types.Timestamp]): Required. The task's current schedule time, available in the - ``schedule_time`` returned by ``LeaseTasks`` response or ``RenewLease`` - response. This restriction is to ensure that your worker currently holds - the lease. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta2.types.Timestamp` - lease_duration (Union[dict, ~google.cloud.tasks_v2beta2.types.Duration]): Required. The desired new lease duration, starting from now. - - The maximum lease duration is 1 week. ``lease_duration`` will be - truncated to the nearest second. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta2.types.Duration` - response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "renew_lease" not in self._inner_api_calls: - self._inner_api_calls[ - "renew_lease" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.renew_lease, - default_retry=self._method_configs["RenewLease"].retry, - default_timeout=self._method_configs["RenewLease"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.RenewLeaseRequest( - name=name, - schedule_time=schedule_time, - lease_duration=lease_duration, - response_view=response_view, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["renew_lease"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def cancel_lease( - self, - name, - schedule_time, - response_view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Cancel a pull task's lease. - - The worker can use this method to cancel a task's lease by setting its - ``schedule_time`` to now. This will make the task available to be leased - to the next caller of ``LeaseTasks``. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') - >>> - >>> # TODO: Initialize `schedule_time`: - >>> schedule_time = {} - >>> - >>> response = client.cancel_lease(name, schedule_time) - - Args: - name (str): Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - schedule_time (Union[dict, ~google.cloud.tasks_v2beta2.types.Timestamp]): Required. The task's current schedule time, available in the - ``schedule_time`` returned by ``LeaseTasks`` response or ``RenewLease`` - response. This restriction is to ensure that your worker currently holds - the lease. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta2.types.Timestamp` - response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "cancel_lease" not in self._inner_api_calls: - self._inner_api_calls[ - "cancel_lease" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.cancel_lease, - default_retry=self._method_configs["CancelLease"].retry, - default_timeout=self._method_configs["CancelLease"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.CancelLeaseRequest( - name=name, schedule_time=schedule_time, response_view=response_view, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["cancel_lease"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def run_task( - self, - name, - response_view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, even if - the task is already running, the queue has reached its ``RateLimits`` or - is ``PAUSED``. - - This command is meant to be used for manual debugging. For example, - ``RunTask`` can be used to retry a failed task after a fix has been made - or to manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is returned - contains the ``status`` after the task is dispatched but before the task - is received by its target. - - If Cloud Tasks receives a successful response from the task's target, - then the task will be deleted; otherwise the task's ``schedule_time`` - will be reset to the time that ``RunTask`` was called plus the retry - delay specified in the queue's ``RetryConfig``. - - ``RunTask`` returns ``NOT_FOUND`` when it is called on a task that has - already succeeded or permanently failed. - - ``RunTask`` cannot be called on a ``pull task``. - - Example: - >>> from google.cloud import tasks_v2beta2 - >>> - >>> client = tasks_v2beta2.CloudTasksClient() - >>> - >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') - >>> - >>> response = client.run_task(name) - - Args: - name (str): Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. 
- - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "run_task" not in self._inner_api_calls: - self._inner_api_calls[ - "run_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.run_task, - default_retry=self._method_configs["RunTask"].retry, - default_timeout=self._method_configs["RunTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.RunTaskRequest(name=name, response_view=response_view,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["run_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client_config.py b/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client_config.py deleted file mode 100644 index 662a99e8..00000000 --- a/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client_config.py +++ /dev/null @@ -1,142 +0,0 @@ -config = { - "interfaces": { - "google.cloud.tasks.v2beta2.CloudTasks": { - "retry_codes": { - "retry_policy_1_codes": ["UNAVAILABLE", "DEADLINE_EXCEEDED"], - "no_retry_codes": [], - "no_retry_1_codes": [], - }, - "retry_params": { - "retry_policy_1_params": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 10000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 10000, - "total_timeout_millis": 10000, - }, - "no_retry_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 0, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 0, - "total_timeout_millis": 0, - }, - "no_retry_1_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 10000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 10000, - "total_timeout_millis": 10000, - }, - }, - "methods": { - "ListQueues": { - "timeout_millis": 15000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "GetQueue": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "CreateQueue": { - "timeout_millis": 10000, - "retry_codes_name": 
"no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "UpdateQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "DeleteQueue": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "PurgeQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "PauseQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "ResumeQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "GetIamPolicy": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "SetIamPolicy": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "TestIamPermissions": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListTasks": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "GetTask": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "CreateTask": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "DeleteTask": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "LeaseTasks": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "AcknowledgeTask": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "RenewLease": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "CancelLease": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "RunTask": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - }, - } - } -} diff --git a/google/cloud/tasks_v2beta2/gapic/enums.py b/google/cloud/tasks_v2beta2/gapic/enums.py deleted file mode 100644 index d5a7094c..00000000 --- a/google/cloud/tasks_v2beta2/gapic/enums.py +++ /dev/null @@ -1,112 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class HttpMethod(enum.IntEnum): - """ - The HTTP method used to execute the task. 
- - Attributes: - HTTP_METHOD_UNSPECIFIED (int): HTTP method unspecified - POST (int): HTTP POST - GET (int): HTTP GET - HEAD (int): HTTP HEAD - PUT (int): HTTP PUT - DELETE (int): HTTP DELETE - """ - - HTTP_METHOD_UNSPECIFIED = 0 - POST = 1 - GET = 2 - HEAD = 3 - PUT = 4 - DELETE = 5 - - -class Queue(object): - class State(enum.IntEnum): - """ - State of the queue. - - Attributes: - STATE_UNSPECIFIED (int): Unspecified state. - RUNNING (int): The queue is running. Tasks can be dispatched. - - If the queue was created using Cloud Tasks and the queue has had no - activity (method calls or task dispatches) for 30 days, the queue may - take a few minutes to re-activate. Some method calls may return - ``NOT_FOUND`` and tasks may not be dispatched for a few minutes until - the queue has been re-activated. - PAUSED (int): Tasks are paused by the user. If the queue is paused then Cloud - Tasks will stop delivering tasks from it, but more tasks can still be - added to it by the user. When a pull queue is paused, all ``LeaseTasks`` - calls will return a ``FAILED_PRECONDITION``. - DISABLED (int): The queue is disabled. - - A queue becomes ``DISABLED`` when - `queue.yaml `__ - or - `queue.xml `__ - is uploaded which does not contain the queue. You cannot directly - disable a queue. - - When a queue is disabled, tasks can still be added to a queue but the - tasks are not dispatched and ``LeaseTasks`` calls return a - ``FAILED_PRECONDITION`` error. - - To permanently delete this queue and all of its tasks, call - ``DeleteQueue``. - """ - - STATE_UNSPECIFIED = 0 - RUNNING = 1 - PAUSED = 2 - DISABLED = 3 - - -class Task(object): - class View(enum.IntEnum): - """ - The view specifies a subset of ``Task`` data. - - When a task is returned in a response, not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Attributes: - VIEW_UNSPECIFIED (int): Unspecified. Defaults to BASIC. - BASIC (int): The basic view omits fields which can be large or can contain - sensitive data. - - This view does not include the (``payload in AppEngineHttpRequest`` and - ``payload in PullMessage``). These payloads are desirable to return only - when needed, because they can be large and because of the sensitivity of - the data that you choose to store in it. - FULL (int): All information is returned. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `__ permission on the - ``Queue`` resource. - """ - - VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 diff --git a/google/cloud/tasks_v2beta2/gapic/transports/__init__.py b/google/cloud/tasks_v2beta2/gapic/transports/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/tasks_v2beta2/gapic/transports/cloud_tasks_grpc_transport.py b/google/cloud/tasks_v2beta2/gapic/transports/cloud_tasks_grpc_transport.py deleted file mode 100644 index 426b7a23..00000000 --- a/google/cloud/tasks_v2beta2/gapic/transports/cloud_tasks_grpc_transport.py +++ /dev/null @@ -1,512 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2_grpc - - -class CloudTasksGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.tasks.v2beta2 CloudTasks API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="cloudtasks.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "cloud_tasks_stub": cloudtasks_pb2_grpc.CloudTasksStub(channel), - } - - @classmethod - def create_channel( - cls, address="cloudtasks.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def list_queues(self): - """Return the gRPC stub for :meth:`CloudTasksClient.list_queues`. - - Lists queues. - - Queues are returned in lexicographical order. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].ListQueues - - @property - def get_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.get_queue`. - - Gets a queue. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].GetQueue - - @property - def create_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.create_queue`. - - Creates a queue. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless - of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].CreateQueue - - @property - def update_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.update_queue`. - - Updates a queue. - - This method creates the queue if it does not exist and updates the queue - if it does exist. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless - of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].UpdateQueue - - @property - def delete_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.delete_queue`. - - Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be created - for 7 days. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].DeleteQueue - - @property - def purge_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.purge_queue`. - - Purges a queue by deleting all of its tasks. - - All tasks created before this method is called are permanently deleted. - - Purge operations can take up to one minute to take effect. Tasks - might be dispatched before the purge takes effect. A purge is irreversible. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["cloud_tasks_stub"].PurgeQueue - - @property - def pause_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.pause_queue`. - - Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks until - the queue is resumed via ``ResumeQueue``. Tasks can still be added when - the queue is paused. A queue is paused if its ``state`` is ``PAUSED``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].PauseQueue - - @property - def resume_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.resume_queue`. - - Resume a queue. - - This method resumes a queue after it has been ``PAUSED`` or - ``DISABLED``. The state of a queue is stored in the queue's ``state``; - after calling this method it will be set to ``RUNNING``. - - WARNING: Resuming many high-QPS queues at the same time can lead to - target overloading. If you are resuming high-QPS queues, follow the - 500/50/5 pattern described in `Managing Cloud Tasks Scaling - Risks `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].ResumeQueue - - @property - def get_iam_policy(self): - """Return the gRPC stub for :meth:`CloudTasksClient.get_iam_policy`. - - Gets the access control policy for a ``Queue``. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the specified - resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].GetIamPolicy - - @property - def set_iam_policy(self): - """Return the gRPC stub for :meth:`CloudTasksClient.set_iam_policy`. - - Sets the access control policy for a ``Queue``. Replaces any - existing policy. - - Note: The Cloud Console does not check queue-level IAM permissions yet. - Project-level permissions are required to use the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the specified - resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].SetIamPolicy - - @property - def test_iam_permissions(self): - """Return the gRPC stub for :meth:`CloudTasksClient.test_iam_permissions`. - - Returns permissions that a caller has on a ``Queue``. If the - resource does not exist, this will return an empty set of permissions, - not a ``NOT_FOUND`` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].TestIamPermissions - - @property - def list_tasks(self): - """Return the gRPC stub for :meth:`CloudTasksClient.list_tasks`. - - Lists the tasks in a queue. 
- - By default, only the ``BASIC`` view is retrieved due to performance - considerations; ``response_view`` controls the subset of information - which is returned. - - The tasks may be returned in any order. The ordering may change at any - time. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].ListTasks - - @property - def get_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.get_task`. - - Gets a task. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].GetTask - - @property - def create_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.create_task`. - - Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask command. - - - For ``App Engine queues``, the maximum task size is 100KB. - - For ``pull queues``, the maximum task size is 1MB. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].CreateTask - - @property - def delete_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.delete_task`. - - Deletes a task. - - A task can be deleted if it is scheduled or dispatched. A task - cannot be deleted if it has completed successfully or permanently - failed. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].DeleteTask - - @property - def lease_tasks(self): - """Return the gRPC stub for :meth:`CloudTasksClient.lease_tasks`. - - Leases tasks from a pull queue for ``lease_duration``. - - This method is invoked by the worker to obtain a lease. The worker must - acknowledge the task via ``AcknowledgeTask`` after they have performed - the work associated with the task. - - The ``payload`` is intended to store data that the worker needs to - perform the work associated with the task. To return the payloads in the - ``response``, set ``response_view`` to ``FULL``. - - A maximum of 10 qps of ``LeaseTasks`` requests are allowed per queue. - ``RESOURCE_EXHAUSTED`` is returned when this limit is exceeded. - ``RESOURCE_EXHAUSTED`` is also returned when - ``max_tasks_dispatched_per_second`` is exceeded. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].LeaseTasks - - @property - def acknowledge_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.acknowledge_task`. - - Acknowledges a pull task. - - The worker, that is, the entity that ``leased`` this task must call this - method to indicate that the work associated with the task has finished. - - The worker must acknowledge a task within the ``lease_duration`` or the - lease will expire and the task will become available to be leased again. - After the task is acknowledged, it will not be returned by a later - ``LeaseTasks``, ``GetTask``, or ``ListTasks``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
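# Illustrative sketch (not part of the diff): the pull-queue worker loop described in
# the LeaseTasks/AcknowledgeTask docstrings above, using the generated tasks_v2beta2
# client. The flattened arguments follow the proto method_signature annotations
# ("parent,lease_duration" and "name,schedule_time"); the IDs and the handler are
# placeholders.
from google.cloud import tasks_v2beta2
from google.protobuf import duration_pb2


def handle(payload: bytes) -> None:
    """Placeholder for the worker's real processing logic."""
    print("processing", len(payload), "bytes")


client = tasks_v2beta2.CloudTasksClient()
parent = client.queue_path("PROJECT_ID", "LOCATION_ID", "PULL_QUEUE_ID")

# Request the FULL view so the payload is included in the leased tasks.
response = client.lease_tasks(
    request={
        "parent": parent,
        "max_tasks": 10,
        "lease_duration": duration_pb2.Duration(seconds=600),
        "response_view": tasks_v2beta2.Task.View.FULL,
    }
)
for task in response.tasks:
    handle(task.pull_message.payload)
    # Acknowledge within the lease window, echoing the schedule_time returned by
    # LeaseTasks so the lease being acknowledged is unambiguous.
    client.acknowledge_task(name=task.name, schedule_time=task.schedule_time)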
- """ - return self._stubs["cloud_tasks_stub"].AcknowledgeTask - - @property - def renew_lease(self): - """Return the gRPC stub for :meth:`CloudTasksClient.renew_lease`. - - Renew the current lease of a pull task. - - The worker can use this method to extend the lease by a new duration, - starting from now. The new task lease will be returned in the task's - ``schedule_time``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].RenewLease - - @property - def cancel_lease(self): - """Return the gRPC stub for :meth:`CloudTasksClient.cancel_lease`. - - Cancel a pull task's lease. - - The worker can use this method to cancel a task's lease by setting its - ``schedule_time`` to now. This will make the task available to be leased - to the next caller of ``LeaseTasks``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].CancelLease - - @property - def run_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.run_task`. - - Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, even if - the task is already running, the queue has reached its ``RateLimits`` or - is ``PAUSED``. - - This command is meant to be used for manual debugging. For example, - ``RunTask`` can be used to retry a failed task after a fix has been made - or to manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is returned - contains the ``status`` after the task is dispatched but before the task - is received by its target. - - If Cloud Tasks receives a successful response from the task's target, - then the task will be deleted; otherwise the task's ``schedule_time`` - will be reset to the time that ``RunTask`` was called plus the retry - delay specified in the queue's ``RetryConfig``. - - ``RunTask`` returns ``NOT_FOUND`` when it is called on a task that has - already succeeded or permanently failed. - - ``RunTask`` cannot be called on a ``pull task``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].RunTask diff --git a/google/cloud/tasks_v2beta2/proto/__init__.py b/google/cloud/tasks_v2beta2/proto/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py b/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py deleted file mode 100644 index bd5a0365..00000000 --- a/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py +++ /dev/null @@ -1,2310 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/tasks_v2beta2/proto/cloudtasks.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.tasks_v2beta2.proto import ( - queue_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2, -) -from google.cloud.tasks_v2beta2.proto import ( - task_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2, -) -from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 -from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/tasks_v2beta2/proto/cloudtasks.proto", - package="google.cloud.tasks.v2beta2", - syntax="proto3", - serialized_options=b"\n\036com.google.cloud.tasks.v2beta2B\017CloudTasksProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks\242\002\005TASKS", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n1google/cloud/tasks_v2beta2/proto/cloudtasks.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/tasks_v2beta2/proto/queue.proto\x1a+google/cloud/tasks_v2beta2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x83\x01\n\x11ListQueuesRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"`\n\x12ListQueuesResponse\x12\x31\n\x06queues\x18\x01 \x03(\x0b\x32!.google.cloud.tasks.v2beta2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x84\x01\n\x12\x43reateQueueRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x35\n\x05queue\x18\x02 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.QueueB\x03\xe0\x41\x02"|\n\x12UpdateQueueRequest\x12\x35\n\x05queue\x18\x01 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 
\x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xaf\x01\n\x10ListTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"]\n\x11ListTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\xbe\x01\n\x11\x43reateTaskRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12\x33\n\x04task\x18\x02 \x01(\x0b\x32 .google.cloud.tasks.v2beta2.TaskB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\xe4\x01\n\x11LeaseTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12\x11\n\tmax_tasks\x18\x02 \x01(\x05\x12\x36\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t"E\n\x12LeaseTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task"\x86\x01\n\x16\x41\x63knowledgeTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02"\xf7\x01\n\x11RenewLeaseRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x36\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\xc0\x01\n\x12\x43\x61ncelLeaseRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\x84\x01\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 
\x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View2\xd4\x1c\n\nCloudTasks\x12\xad\x01\n\nListQueues\x12-.google.cloud.tasks.v2beta2.ListQueuesRequest\x1a..google.cloud.tasks.v2beta2.ListQueuesResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x9a\x01\n\x08GetQueue\x12+.google.cloud.tasks.v2beta2.GetQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue">\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xaf\x01\n\x0b\x43reateQueue\x12..google.cloud.tasks.v2beta2.CreateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"M\x82\xd3\xe4\x93\x02\x38"//v2beta2/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xba\x01\n\x0bUpdateQueue\x12..google.cloud.tasks.v2beta2.UpdateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"X\x82\xd3\xe4\x93\x02>25/v2beta2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x95\x01\n\x0b\x44\x65leteQueue\x12..google.cloud.tasks.v2beta2.DeleteQueueRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v2beta2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa7\x01\n\nPurgeQueue\x12-.google.cloud.tasks.v2beta2.PurgeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta2/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\xa7\x01\n\nPauseQueue\x12-.google.cloud.tasks.v2beta2.PauseQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta2/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\xaa\x01\n\x0bResumeQueue\x12..google.cloud.tasks.v2beta2.ResumeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"H\x82\xd3\xe4\x93\x02;"6/v2beta2/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\xa1\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"V\x82\xd3\xe4\x93\x02\x45"@/v2beta2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\xda\x41\x08resource\x12\xa8\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"]\x82\xd3\xe4\x93\x02\x45"@/v2beta2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xd3\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"h\x82\xd3\xe4\x93\x02K"F/v2beta2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xb2\x01\n\tListTasks\x12,.google.cloud.tasks.v2beta2.ListTasksRequest\x1a-.google.cloud.tasks.v2beta2.ListTasksResponse"H\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x9f\x01\n\x07GetTask\x12*.google.cloud.tasks.v2beta2.GetTaskRequest\x1a .google.cloud.tasks.v2beta2.Task"F\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xaf\x01\n\nCreateTask\x12-.google.cloud.tasks.v2beta2.CreateTaskRequest\x1a 
.google.cloud.tasks.v2beta2.Task"P\x82\xd3\xe4\x93\x02<"7/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x9b\x01\n\nDeleteTask\x12-.google.cloud.tasks.v2beta2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02\x39*7/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xcd\x01\n\nLeaseTasks\x12-.google.cloud.tasks.v2beta2.LeaseTasksRequest\x1a..google.cloud.tasks.v2beta2.LeaseTasksResponse"`\x82\xd3\xe4\x93\x02\x42"=/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:lease:\x01*\xda\x41\x15parent,lease_duration\x12\xc2\x01\n\x0f\x41\x63knowledgeTask\x12\x32.google.cloud.tasks.v2beta2.AcknowledgeTaskRequest\x1a\x16.google.protobuf.Empty"c\x82\xd3\xe4\x93\x02H"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:acknowledge:\x01*\xda\x41\x12name,schedule_time\x12\xd0\x01\n\nRenewLease\x12-.google.cloud.tasks.v2beta2.RenewLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task"q\x82\xd3\xe4\x93\x02G"B/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:renewLease:\x01*\xda\x41!name,schedule_time,lease_duration\x12\xc4\x01\n\x0b\x43\x61ncelLease\x12..google.cloud.tasks.v2beta2.CancelLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task"c\x82\xd3\xe4\x93\x02H"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:cancelLease:\x01*\xda\x41\x12name,schedule_time\x12\xa6\x01\n\x07RunTask\x12*.google.cloud.tasks.v2beta2.RunTaskRequest\x1a .google.cloud.tasks.v2beta2.Task"M\x82\xd3\xe4\x93\x02@";/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB|\n\x1e\x63om.google.cloud.tasks.v2beta2B\x0f\x43loudTasksProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks\xa2\x02\x05TASKSb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.DESCRIPTOR, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.DESCRIPTOR, - google_dot_iam_dot_v1_dot_iam__policy__pb2.DESCRIPTOR, - google_dot_iam_dot_v1_dot_policy__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_LISTQUEUESREQUEST = _descriptor.Descriptor( - name="ListQueuesRequest", - full_name="google.cloud.tasks.v2beta2.ListQueuesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.tasks.v2beta2.ListQueuesRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.tasks.v2beta2.ListQueuesRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.tasks.v2beta2.ListQueuesRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.tasks.v2beta2.ListQueuesRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=476, - serialized_end=607, -) - - -_LISTQUEUESRESPONSE = _descriptor.Descriptor( - name="ListQueuesResponse", - full_name="google.cloud.tasks.v2beta2.ListQueuesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="queues", - full_name="google.cloud.tasks.v2beta2.ListQueuesResponse.queues", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=609, - serialized_end=705, -) - - -_GETQUEUEREQUEST = _descriptor.Descriptor( - name="GetQueueRequest", - full_name="google.cloud.tasks.v2beta2.GetQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta2.GetQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - 
serialized_start=707, - serialized_end=779, -) - - -_CREATEQUEUEREQUEST = _descriptor.Descriptor( - name="CreateQueueRequest", - full_name="google.cloud.tasks.v2beta2.CreateQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.tasks.v2beta2.CreateQueueRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="queue", - full_name="google.cloud.tasks.v2beta2.CreateQueueRequest.queue", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=782, - serialized_end=914, -) - - -_UPDATEQUEUEREQUEST = _descriptor.Descriptor( - name="UpdateQueueRequest", - full_name="google.cloud.tasks.v2beta2.UpdateQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="queue", - full_name="google.cloud.tasks.v2beta2.UpdateQueueRequest.queue", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.tasks.v2beta2.UpdateQueueRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=916, - serialized_end=1040, -) - - -_DELETEQUEUEREQUEST = _descriptor.Descriptor( - name="DeleteQueueRequest", - full_name="google.cloud.tasks.v2beta2.DeleteQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta2.DeleteQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - 
), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1042, - serialized_end=1117, -) - - -_PURGEQUEUEREQUEST = _descriptor.Descriptor( - name="PurgeQueueRequest", - full_name="google.cloud.tasks.v2beta2.PurgeQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta2.PurgeQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1119, - serialized_end=1193, -) - - -_PAUSEQUEUEREQUEST = _descriptor.Descriptor( - name="PauseQueueRequest", - full_name="google.cloud.tasks.v2beta2.PauseQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta2.PauseQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1195, - serialized_end=1269, -) - - -_RESUMEQUEUEREQUEST = _descriptor.Descriptor( - name="ResumeQueueRequest", - full_name="google.cloud.tasks.v2beta2.ResumeQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta2.ResumeQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1271, - serialized_end=1346, -) - - -_LISTTASKSREQUEST = _descriptor.Descriptor( - name="ListTasksRequest", - full_name="google.cloud.tasks.v2beta2.ListTasksRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.tasks.v2beta2.ListTasksRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \022\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2beta2.ListTasksRequest.response_view", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.tasks.v2beta2.ListTasksRequest.page_size", - index=2, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.tasks.v2beta2.ListTasksRequest.page_token", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1349, - serialized_end=1524, -) - - -_LISTTASKSRESPONSE = _descriptor.Descriptor( - name="ListTasksResponse", - full_name="google.cloud.tasks.v2beta2.ListTasksResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="tasks", - full_name="google.cloud.tasks.v2beta2.ListTasksResponse.tasks", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.tasks.v2beta2.ListTasksResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1526, - serialized_end=1619, -) - - -_GETTASKREQUEST = _descriptor.Descriptor( - name="GetTaskRequest", - full_name="google.cloud.tasks.v2beta2.GetTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - 
full_name="google.cloud.tasks.v2beta2.GetTaskRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2beta2.GetTaskRequest.response_view", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1622, - serialized_end=1754, -) - - -_CREATETASKREQUEST = _descriptor.Descriptor( - name="CreateTaskRequest", - full_name="google.cloud.tasks.v2beta2.CreateTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.tasks.v2beta2.CreateTaskRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \022\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="task", - full_name="google.cloud.tasks.v2beta2.CreateTaskRequest.task", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2beta2.CreateTaskRequest.response_view", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1757, - serialized_end=1947, -) - - -_DELETETASKREQUEST = _descriptor.Descriptor( - name="DeleteTaskRequest", - full_name="google.cloud.tasks.v2beta2.DeleteTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta2.DeleteTaskRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=b"\340A\002\372A \n\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1949, - serialized_end=2022, -) - - -_LEASETASKSREQUEST = _descriptor.Descriptor( - name="LeaseTasksRequest", - full_name="google.cloud.tasks.v2beta2.LeaseTasksRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.tasks.v2beta2.LeaseTasksRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \022\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_tasks", - full_name="google.cloud.tasks.v2beta2.LeaseTasksRequest.max_tasks", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="lease_duration", - full_name="google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.tasks.v2beta2.LeaseTasksRequest.filter", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2025, - serialized_end=2253, -) - - -_LEASETASKSRESPONSE = _descriptor.Descriptor( - name="LeaseTasksResponse", - full_name="google.cloud.tasks.v2beta2.LeaseTasksResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="tasks", - full_name="google.cloud.tasks.v2beta2.LeaseTasksResponse.tasks", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - 
has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2255, - serialized_end=2324, -) - - -_ACKNOWLEDGETASKREQUEST = _descriptor.Descriptor( - name="AcknowledgeTaskRequest", - full_name="google.cloud.tasks.v2beta2.AcknowledgeTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta2.AcknowledgeTaskRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="schedule_time", - full_name="google.cloud.tasks.v2beta2.AcknowledgeTaskRequest.schedule_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2327, - serialized_end=2461, -) - - -_RENEWLEASEREQUEST = _descriptor.Descriptor( - name="RenewLeaseRequest", - full_name="google.cloud.tasks.v2beta2.RenewLeaseRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta2.RenewLeaseRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="schedule_time", - full_name="google.cloud.tasks.v2beta2.RenewLeaseRequest.schedule_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="lease_duration", - full_name="google.cloud.tasks.v2beta2.RenewLeaseRequest.lease_duration", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - 
), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2beta2.RenewLeaseRequest.response_view", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2464, - serialized_end=2711, -) - - -_CANCELLEASEREQUEST = _descriptor.Descriptor( - name="CancelLeaseRequest", - full_name="google.cloud.tasks.v2beta2.CancelLeaseRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta2.CancelLeaseRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="schedule_time", - full_name="google.cloud.tasks.v2beta2.CancelLeaseRequest.schedule_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2beta2.CancelLeaseRequest.response_view", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2714, - serialized_end=2906, -) - - -_RUNTASKREQUEST = _descriptor.Descriptor( - name="RunTaskRequest", - full_name="google.cloud.tasks.v2beta2.RunTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta2.RunTaskRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2beta2.RunTaskRequest.response_view", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2909, - serialized_end=3041, -) - -_LISTQUEUESRESPONSE.fields_by_name[ - "queues" -].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE -_CREATEQUEUEREQUEST.fields_by_name[ - "queue" -].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE -_UPDATEQUEUEREQUEST.fields_by_name[ - "queue" -].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE -_UPDATEQUEUEREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTTASKSREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW -_LISTTASKSRESPONSE.fields_by_name[ - "tasks" -].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK -_GETTASKREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW -_CREATETASKREQUEST.fields_by_name[ - "task" -].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK -_CREATETASKREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW -_LEASETASKSREQUEST.fields_by_name[ - "lease_duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_LEASETASKSREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW -_LEASETASKSRESPONSE.fields_by_name[ - "tasks" -].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK -_ACKNOWLEDGETASKREQUEST.fields_by_name[ - "schedule_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_RENEWLEASEREQUEST.fields_by_name[ - "schedule_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_RENEWLEASEREQUEST.fields_by_name[ - "lease_duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_RENEWLEASEREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW -_CANCELLEASEREQUEST.fields_by_name[ - "schedule_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_CANCELLEASEREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW -_RUNTASKREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW -DESCRIPTOR.message_types_by_name["ListQueuesRequest"] = _LISTQUEUESREQUEST -DESCRIPTOR.message_types_by_name["ListQueuesResponse"] = _LISTQUEUESRESPONSE -DESCRIPTOR.message_types_by_name["GetQueueRequest"] = _GETQUEUEREQUEST -DESCRIPTOR.message_types_by_name["CreateQueueRequest"] = _CREATEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["UpdateQueueRequest"] = _UPDATEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["DeleteQueueRequest"] = _DELETEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["PurgeQueueRequest"] = _PURGEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["PauseQueueRequest"] = _PAUSEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["ResumeQueueRequest"] = _RESUMEQUEUEREQUEST 
-DESCRIPTOR.message_types_by_name["ListTasksRequest"] = _LISTTASKSREQUEST -DESCRIPTOR.message_types_by_name["ListTasksResponse"] = _LISTTASKSRESPONSE -DESCRIPTOR.message_types_by_name["GetTaskRequest"] = _GETTASKREQUEST -DESCRIPTOR.message_types_by_name["CreateTaskRequest"] = _CREATETASKREQUEST -DESCRIPTOR.message_types_by_name["DeleteTaskRequest"] = _DELETETASKREQUEST -DESCRIPTOR.message_types_by_name["LeaseTasksRequest"] = _LEASETASKSREQUEST -DESCRIPTOR.message_types_by_name["LeaseTasksResponse"] = _LEASETASKSRESPONSE -DESCRIPTOR.message_types_by_name["AcknowledgeTaskRequest"] = _ACKNOWLEDGETASKREQUEST -DESCRIPTOR.message_types_by_name["RenewLeaseRequest"] = _RENEWLEASEREQUEST -DESCRIPTOR.message_types_by_name["CancelLeaseRequest"] = _CANCELLEASEREQUEST -DESCRIPTOR.message_types_by_name["RunTaskRequest"] = _RUNTASKREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ListQueuesRequest = _reflection.GeneratedProtocolMessageType( - "ListQueuesRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTQUEUESREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. - - Attributes: - parent: - Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - filter: - \ ``filter`` can be used to specify a subset of queues. Any - [Queue][google.cloud.tasks.v2beta2.Queue] field can be used as - a filter and several operators as supported. For example: - ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as - described in `Stackdriver’s Advanced Logs Filters `_. Sample - filter "app_engine_http_target: \*". Note that using filters - might cause fewer queues than the requested_page size to be - returned. - page_size: - Requested page size. The maximum page size is 9800. If - unspecified, the page size will be the maximum. Fewer queues - than requested might be returned, even if more queues exist; - use the [next_page_token][google.cloud.tasks.v2beta2.ListQueue - sResponse.next_page_token] in the response to determine if - more queues exist. - page_token: - A token identifying the page of results to return. To request - the first page results, page_token must be empty. To request - the next page of results, page_token must be the value of [nex - t_page_token][google.cloud.tasks.v2beta2.ListQueuesResponse.ne - xt_page_token] returned from the previous call to - [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues] - method. It is an error to switch the value of the - [filter][google.cloud.tasks.v2beta2.ListQueuesRequest.filter] - while iterating through pages. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListQueuesRequest) - }, -) -_sym_db.RegisterMessage(ListQueuesRequest) - -ListQueuesResponse = _reflection.GeneratedProtocolMessageType( - "ListQueuesResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTQUEUESRESPONSE, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Response message for - [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. - - Attributes: - queues: - The list of queues. - next_page_token: - A token to retrieve next page of results. To return the next - page of results, call - [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues] - with this value as the [page_token][google.cloud.tasks.v2beta2 - .ListQueuesRequest.page_token]. If the next_page_token is - empty, there are no more results. The page token is valid for - only 2 hours. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListQueuesResponse) - }, -) -_sym_db.RegisterMessage(ListQueuesResponse) - -GetQueueRequest = _reflection.GeneratedProtocolMessageType( - "GetQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. - - Attributes: - name: - Required. The resource name of the queue. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.GetQueueRequest) - }, -) -_sym_db.RegisterMessage(GetQueueRequest) - -CreateQueueRequest = _reflection.GeneratedProtocolMessageType( - "CreateQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. - - Attributes: - parent: - Required. The location name in which the queue will be - created. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` The list of - allowed locations can be obtained by calling Cloud Tasks’ - implementation of [ListLocations][google.cloud.location.Locati - ons.ListLocations]. - queue: - Required. The queue to create. [Queue’s - name][google.cloud.tasks.v2beta2.Queue.name] cannot be the - same as an existing queue. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.CreateQueueRequest) - }, -) -_sym_db.RegisterMessage(CreateQueueRequest) - -UpdateQueueRequest = _reflection.GeneratedProtocolMessageType( - "UpdateQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue]. - - Attributes: - queue: - Required. The queue to create or update. The queue’s - [name][google.cloud.tasks.v2beta2.Queue.name] must be - specified. Output only fields cannot be modified using - UpdateQueue. Any value specified for an output only field will - be ignored. The queue’s - [name][google.cloud.tasks.v2beta2.Queue.name] cannot be - changed. - update_mask: - A mask used to specify which fields of the queue are being - updated. If empty, then all fields will be updated. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.UpdateQueueRequest) - }, -) -_sym_db.RegisterMessage(UpdateQueueRequest) - -DeleteQueueRequest = _reflection.GeneratedProtocolMessageType( - "DeleteQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. - - Attributes: - name: - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.DeleteQueueRequest) - }, -) -_sym_db.RegisterMessage(DeleteQueueRequest) - -PurgeQueueRequest = _reflection.GeneratedProtocolMessageType( - "PurgeQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _PURGEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. 
- - Attributes: - name: - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.PurgeQueueRequest) - }, -) -_sym_db.RegisterMessage(PurgeQueueRequest) - -PauseQueueRequest = _reflection.GeneratedProtocolMessageType( - "PauseQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _PAUSEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue]. - - Attributes: - name: - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.PauseQueueRequest) - }, -) -_sym_db.RegisterMessage(PauseQueueRequest) - -ResumeQueueRequest = _reflection.GeneratedProtocolMessageType( - "ResumeQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _RESUMEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. - - Attributes: - name: - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ResumeQueueRequest) - }, -) -_sym_db.RegisterMessage(ResumeQueueRequest) - -ListTasksRequest = _reflection.GeneratedProtocolMessageType( - "ListTasksRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTASKSREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for listing tasks using - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - - Attributes: - parent: - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. - page_size: - Maximum page size. Fewer tasks than requested might be - returned, even if more tasks exist; use [next_page_token][goog - le.cloud.tasks.v2beta2.ListTasksResponse.next_page_token] in - the response to determine if more tasks exist. The maximum - page size is 1000. If unspecified, the page size will be the - maximum. - page_token: - A token identifying the page of results to return. To request - the first page results, page_token must be empty. To request - the next page of results, page_token must be the value of [nex - t_page_token][google.cloud.tasks.v2beta2.ListTasksResponse.nex - t_page_token] returned from the previous call to - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] - method. The page token is valid for only 2 hours. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListTasksRequest) - }, -) -_sym_db.RegisterMessage(ListTasksRequest) - -ListTasksResponse = _reflection.GeneratedProtocolMessageType( - "ListTasksResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTASKSRESPONSE, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Response message for listing tasks using - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - - Attributes: - tasks: - The list of tasks. - next_page_token: - A token to retrieve next page of results. To return the next - page of results, call - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] - with this value as the [page_token][google.cloud.tasks.v2beta2 - .ListTasksRequest.page_token]. If the next_page_token is - empty, there are no more results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListTasksResponse) - }, -) -_sym_db.RegisterMessage(ListTasksResponse) - -GetTaskRequest = _reflection.GeneratedProtocolMessageType( - "GetTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETTASKREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for getting a task using - [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. - - Attributes: - name: - Required. The task name. For example: ``projects/PROJECT_ID/lo - cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.GetTaskRequest) - }, -) -_sym_db.RegisterMessage(GetTaskRequest) - -CreateTaskRequest = _reflection.GeneratedProtocolMessageType( - "CreateTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATETASKREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for - [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. - - Attributes: - parent: - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - The queue must already exist. - task: - Required. The task to add. Task names have the following - format: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUE - UE_ID/tasks/TASK_ID``. The user can optionally specify a task - [name][google.cloud.tasks.v2beta2.Task.name]. If a name is not - specified then the system will generate a random unique task - id, which will be set in the task returned in the - [response][google.cloud.tasks.v2beta2.Task.name]. If - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - is not set or is in the past then Cloud Tasks will set it to - the current time. Task De-duplication: Explicitly specifying - a task ID enables task de-duplication. 
If a task’s ID is - identical to that of an existing task or a task that was - deleted or completed recently then the call will fail with - [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the - task’s queue was created using Cloud Tasks, then another task - with the same name can’t be created for ~1hour after the - original task was deleted or completed. If the task’s queue - was created using queue.yaml or queue.xml, then another task - with the same name can’t be created for ~9days after the - original task was deleted or completed. Because there is an - extra lookup cost to identify duplicate task names, these - [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] - calls have significantly increased latency. Using hashed - strings for the task id or for the prefix of the task id is - recommended. Choosing task ids that are sequential or have - sequential prefixes, for example using a timestamp, causes an - increase in latency and error rates in all task commands. The - infrastructure relies on an approximately uniform distribution - of task ids to store and serve tasks efficiently. - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.CreateTaskRequest) - }, -) -_sym_db.RegisterMessage(CreateTaskRequest) - -DeleteTaskRequest = _reflection.GeneratedProtocolMessageType( - "DeleteTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETETASKREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for deleting a task using - [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. - - Attributes: - name: - Required. The task name. For example: ``projects/PROJECT_ID/lo - cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.DeleteTaskRequest) - }, -) -_sym_db.RegisterMessage(DeleteTaskRequest) - -LeaseTasksRequest = _reflection.GeneratedProtocolMessageType( - "LeaseTasksRequest", - (_message.Message,), - { - "DESCRIPTOR": _LEASETASKSREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for leasing tasks using - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. - - Attributes: - parent: - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - max_tasks: - The maximum number of tasks to lease. The system will make a - best effort to return as close to as ``max_tasks`` as - possible. The largest that ``max_tasks`` can be is 1000. The - maximum total size of a [lease tasks - response][google.cloud.tasks.v2beta2.LeaseTasksResponse] is 32 - MB. If the sum of all task sizes requested reaches this limit, - fewer tasks than requested are returned. - lease_duration: - Required. The duration of the lease. 
Each task returned in - the [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] - will have its - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - set to the current time plus the ``lease_duration``. The task - is leased until its [schedule_time][google.cloud.tasks.v2beta2 - .Task.schedule_time]; thus, the task will not be returned to - another - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - call before its [schedule_time][google.cloud.tasks.v2beta2.Tas - k.schedule_time]. After the worker has successfully finished - the work associated with the task, the worker must call via [A - cknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.Acknowle - dgeTask] before the [schedule_time][google.cloud.tasks.v2beta2 - .Task.schedule_time]. Otherwise the task will be returned to a - later - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - call so that another worker can retry it. The maximum lease - duration is 1 week. ``lease_duration`` will be truncated to - the nearest second. - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. - filter: - \ ``filter`` can be used to specify a subset of tasks to - lease. When ``filter`` is set to ``tag=`` then the - [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] will - contain only tasks whose - [tag][google.cloud.tasks.v2beta2.PullMessage.tag] is equal to - ````. ```` must be less than 500 characters. - When ``filter`` is set to ``tag_function=oldest_tag()``, only - tasks which have the same tag as the task with the oldest - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - will be returned. Grammar Syntax: - ``filter = "tag=" tag | - "tag_function=" function`` - ``tag = string`` - ``function - = "oldest_tag()"`` The ``oldest_tag()`` function returns - tasks which have the same tag as the oldest task (ordered by - schedule time). SDK compatibility: Although the SDK allows - tags to be either string or `bytes `_, only UTF-8 encoded - tags can be used in Cloud Tasks. Tag which aren’t UTF-8 - encoded can’t be used in the - [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter] - and the task’s - [tag][google.cloud.tasks.v2beta2.PullMessage.tag] will be - displayed as empty in Cloud Tasks. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.LeaseTasksRequest) - }, -) -_sym_db.RegisterMessage(LeaseTasksRequest) - -LeaseTasksResponse = _reflection.GeneratedProtocolMessageType( - "LeaseTasksResponse", - (_message.Message,), - { - "DESCRIPTOR": _LEASETASKSRESPONSE, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Response message for leasing tasks using - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. - - Attributes: - tasks: - The leased tasks. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.LeaseTasksResponse) - }, -) -_sym_db.RegisterMessage(LeaseTasksResponse) - -AcknowledgeTaskRequest = _reflection.GeneratedProtocolMessageType( - "AcknowledgeTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _ACKNOWLEDGETASKREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for acknowledging a task using [AcknowledgeTask][googl - e.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. - - Attributes: - name: - Required. The task name. For example: ``projects/PROJECT_ID/lo - cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - schedule_time: - Required. The task’s current schedule time, available in the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - returned by - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - response or - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] - response. This restriction is to ensure that your worker - currently holds the lease. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AcknowledgeTaskRequest) - }, -) -_sym_db.RegisterMessage(AcknowledgeTaskRequest) - -RenewLeaseRequest = _reflection.GeneratedProtocolMessageType( - "RenewLeaseRequest", - (_message.Message,), - { - "DESCRIPTOR": _RENEWLEASEREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for renewing a lease using - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. - - Attributes: - name: - Required. The task name. For example: ``projects/PROJECT_ID/lo - cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - schedule_time: - Required. The task’s current schedule time, available in the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - returned by - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - response or - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] - response. This restriction is to ensure that your worker - currently holds the lease. - lease_duration: - Required. The desired new lease duration, starting from now. - The maximum lease duration is 1 week. ``lease_duration`` will - be truncated to the nearest second. - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.RenewLeaseRequest) - }, -) -_sym_db.RegisterMessage(RenewLeaseRequest) - -CancelLeaseRequest = _reflection.GeneratedProtocolMessageType( - "CancelLeaseRequest", - (_message.Message,), - { - "DESCRIPTOR": _CANCELLEASEREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for canceling a lease using - [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. - - Attributes: - name: - Required. The task name. 
For example: ``projects/PROJECT_ID/lo - cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - schedule_time: - Required. The task’s current schedule time, available in the - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] - returned by - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - response or - [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] - response. This restriction is to ensure that your worker - currently holds the lease. - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.CancelLeaseRequest) - }, -) -_sym_db.RegisterMessage(CancelLeaseRequest) - -RunTaskRequest = _reflection.GeneratedProtocolMessageType( - "RunTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _RUNTASKREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.cloudtasks_pb2", - "__doc__": """Request message for forcing a task to run now using - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. - - Attributes: - name: - Required. The task name. For example: ``projects/PROJECT_ID/lo - cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2beta2.Task] resource. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.RunTaskRequest) - }, -) -_sym_db.RegisterMessage(RunTaskRequest) - - -DESCRIPTOR._options = None -_LISTQUEUESREQUEST.fields_by_name["parent"]._options = None -_GETQUEUEREQUEST.fields_by_name["name"]._options = None -_CREATEQUEUEREQUEST.fields_by_name["parent"]._options = None -_CREATEQUEUEREQUEST.fields_by_name["queue"]._options = None -_UPDATEQUEUEREQUEST.fields_by_name["queue"]._options = None -_DELETEQUEUEREQUEST.fields_by_name["name"]._options = None -_PURGEQUEUEREQUEST.fields_by_name["name"]._options = None -_PAUSEQUEUEREQUEST.fields_by_name["name"]._options = None -_RESUMEQUEUEREQUEST.fields_by_name["name"]._options = None -_LISTTASKSREQUEST.fields_by_name["parent"]._options = None -_GETTASKREQUEST.fields_by_name["name"]._options = None -_CREATETASKREQUEST.fields_by_name["parent"]._options = None -_CREATETASKREQUEST.fields_by_name["task"]._options = None -_DELETETASKREQUEST.fields_by_name["name"]._options = None -_LEASETASKSREQUEST.fields_by_name["parent"]._options = None -_LEASETASKSREQUEST.fields_by_name["lease_duration"]._options = None -_ACKNOWLEDGETASKREQUEST.fields_by_name["name"]._options = None -_ACKNOWLEDGETASKREQUEST.fields_by_name["schedule_time"]._options = None -_RENEWLEASEREQUEST.fields_by_name["name"]._options = None -_RENEWLEASEREQUEST.fields_by_name["schedule_time"]._options = None -_RENEWLEASEREQUEST.fields_by_name["lease_duration"]._options = None -_CANCELLEASEREQUEST.fields_by_name["name"]._options = None -_CANCELLEASEREQUEST.fields_by_name["schedule_time"]._options = None -_RUNTASKREQUEST.fields_by_name["name"]._options = None - -_CLOUDTASKS = _descriptor.ServiceDescriptor( - name="CloudTasks", - full_name="google.cloud.tasks.v2beta2.CloudTasks", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A\031cloudtasks.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform", - create_key=_descriptor._internal_create_key, - serialized_start=3044, - serialized_end=6712, - methods=[ - _descriptor.MethodDescriptor( - name="ListQueues", - full_name="google.cloud.tasks.v2beta2.CloudTasks.ListQueues", - index=0, - containing_service=None, - input_type=_LISTQUEUESREQUEST, - output_type=_LISTQUEUESRESPONSE, - serialized_options=b"\202\323\344\223\0021\022//v2beta2/{parent=projects/*/locations/*}/queues\332A\006parent", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetQueue", - full_name="google.cloud.tasks.v2beta2.CloudTasks.GetQueue", - index=1, - containing_service=None, - input_type=_GETQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b"\202\323\344\223\0021\022//v2beta2/{name=projects/*/locations/*/queues/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CreateQueue", - full_name="google.cloud.tasks.v2beta2.CloudTasks.CreateQueue", - index=2, - containing_service=None, - input_type=_CREATEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b'\202\323\344\223\0028"//v2beta2/{parent=projects/*/locations/*}/queues:\005queue\332A\014parent,queue', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateQueue", - full_name="google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue", - index=3, - containing_service=None, - input_type=_UPDATEQUEUEREQUEST, - 
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b"\202\323\344\223\002>25/v2beta2/{queue.name=projects/*/locations/*/queues/*}:\005queue\332A\021queue,update_mask", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteQueue", - full_name="google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue", - index=4, - containing_service=None, - input_type=_DELETEQUEUEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\0021*//v2beta2/{name=projects/*/locations/*/queues/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="PurgeQueue", - full_name="google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue", - index=5, - containing_service=None, - input_type=_PURGEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b'\202\323\344\223\002:"5/v2beta2/{name=projects/*/locations/*/queues/*}:purge:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="PauseQueue", - full_name="google.cloud.tasks.v2beta2.CloudTasks.PauseQueue", - index=6, - containing_service=None, - input_type=_PAUSEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b'\202\323\344\223\002:"5/v2beta2/{name=projects/*/locations/*/queues/*}:pause:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ResumeQueue", - full_name="google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue", - index=7, - containing_service=None, - input_type=_RESUMEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b'\202\323\344\223\002;"6/v2beta2/{name=projects/*/locations/*/queues/*}:resume:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetIamPolicy", - full_name="google.cloud.tasks.v2beta2.CloudTasks.GetIamPolicy", - index=8, - containing_service=None, - input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._GETIAMPOLICYREQUEST, - output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, - serialized_options=b'\202\323\344\223\002E"@/v2beta2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\001*\332A\010resource', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="SetIamPolicy", - full_name="google.cloud.tasks.v2beta2.CloudTasks.SetIamPolicy", - index=9, - containing_service=None, - input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._SETIAMPOLICYREQUEST, - output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, - serialized_options=b'\202\323\344\223\002E"@/v2beta2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\001*\332A\017resource,policy', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="TestIamPermissions", - full_name="google.cloud.tasks.v2beta2.CloudTasks.TestIamPermissions", - index=10, - containing_service=None, - input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSREQUEST, - output_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSRESPONSE, - serialized_options=b'\202\323\344\223\002K"F/v2beta2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\001*\332A\024resource,permissions', - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.MethodDescriptor( - name="ListTasks", - full_name="google.cloud.tasks.v2beta2.CloudTasks.ListTasks", - index=11, - containing_service=None, - input_type=_LISTTASKSREQUEST, - output_type=_LISTTASKSRESPONSE, - serialized_options=b"\202\323\344\223\0029\0227/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks\332A\006parent", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetTask", - full_name="google.cloud.tasks.v2beta2.CloudTasks.GetTask", - index=12, - containing_service=None, - input_type=_GETTASKREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK, - serialized_options=b"\202\323\344\223\0029\0227/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CreateTask", - full_name="google.cloud.tasks.v2beta2.CloudTasks.CreateTask", - index=13, - containing_service=None, - input_type=_CREATETASKREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK, - serialized_options=b'\202\323\344\223\002<"7/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:\001*\332A\013parent,task', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteTask", - full_name="google.cloud.tasks.v2beta2.CloudTasks.DeleteTask", - index=14, - containing_service=None, - input_type=_DELETETASKREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\0029*7/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="LeaseTasks", - full_name="google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks", - index=15, - containing_service=None, - input_type=_LEASETASKSREQUEST, - output_type=_LEASETASKSRESPONSE, - serialized_options=b'\202\323\344\223\002B"=/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:lease:\001*\332A\025parent,lease_duration', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="AcknowledgeTask", - full_name="google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask", - index=16, - containing_service=None, - input_type=_ACKNOWLEDGETASKREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\002H"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:acknowledge:\001*\332A\022name,schedule_time', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="RenewLease", - full_name="google.cloud.tasks.v2beta2.CloudTasks.RenewLease", - index=17, - containing_service=None, - input_type=_RENEWLEASEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK, - serialized_options=b'\202\323\344\223\002G"B/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:renewLease:\001*\332A!name,schedule_time,lease_duration', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CancelLease", - full_name="google.cloud.tasks.v2beta2.CloudTasks.CancelLease", - index=18, - containing_service=None, - input_type=_CANCELLEASEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK, - serialized_options=b'\202\323\344\223\002H"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:cancelLease:\001*\332A\022name,schedule_time', - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.MethodDescriptor( - name="RunTask", - full_name="google.cloud.tasks.v2beta2.CloudTasks.RunTask", - index=19, - containing_service=None, - input_type=_RUNTASKREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK, - serialized_options=b'\202\323\344\223\002@";/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_CLOUDTASKS) - -DESCRIPTOR.services_by_name["CloudTasks"] = _CLOUDTASKS - -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2_grpc.py b/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2_grpc.py deleted file mode 100644 index 8b3a5531..00000000 --- a/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2_grpc.py +++ /dev/null @@ -1,1103 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from google.cloud.tasks_v2beta2.proto import ( - cloudtasks_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2, -) -from google.cloud.tasks_v2beta2.proto import ( - queue_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2, -) -from google.cloud.tasks_v2beta2.proto import ( - task_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2, -) -from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 -from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class CloudTasksStub(object): - """Cloud Tasks allows developers to manage the execution of background - work in their applications. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ListQueues = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/ListQueues", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.FromString, - ) - self.GetQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/GetQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.CreateQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.UpdateQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.DeleteQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.PurgeQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.PauseQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.ResumeQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.GetIamPolicy = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy", - request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, - ) - self.SetIamPolicy = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy", - request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, - ) - self.TestIamPermissions = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions", - request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString, - ) - self.ListTasks = 
channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/ListTasks", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.FromString, - ) - self.GetTask = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/GetTask", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, - ) - self.CreateTask = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/CreateTask", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, - ) - self.DeleteTask = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.LeaseTasks = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.LeaseTasksRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.LeaseTasksResponse.FromString, - ) - self.AcknowledgeTask = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.AcknowledgeTaskRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.RenewLease = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/RenewLease", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.RenewLeaseRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, - ) - self.CancelLease = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/CancelLease", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CancelLeaseRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, - ) - self.RunTask = channel.unary_unary( - "/google.cloud.tasks.v2beta2.CloudTasks/RunTask", - request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, - ) - - -class CloudTasksServicer(object): - """Cloud Tasks allows developers to manage the execution of background - work in their applications. - """ - - def ListQueues(self, request, context): - """Lists queues. - - Queues are returned in lexicographical order. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetQueue(self, request, context): - """Gets a queue. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateQueue(self, request, context): - """Creates a queue. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless of whether - it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. - Read - [Overview of Queue Management and - queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using - this method. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateQueue(self, request, context): - """Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless of whether - it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. - Read - [Overview of Queue Management and - queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using - this method. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteQueue(self, request, context): - """Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be created - for 7 days. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. - Read - [Overview of Queue Management and - queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using - this method. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def PurgeQueue(self, request, context): - """Purges a queue by deleting all of its tasks. - - All tasks created before this method is called are permanently deleted. - - Purge operations can take up to one minute to take effect. Tasks - might be dispatched before the purge takes effect. A purge is irreversible. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def PauseQueue(self, request, context): - """Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. Tasks can still be added - when the queue is paused. A queue is paused if its - [state][google.cloud.tasks.v2beta2.Queue.state] is [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ResumeQueue(self, request, context): - """Resume a queue. 
- - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The state of a queue is stored - in the queue's [state][google.cloud.tasks.v2beta2.Queue.state]; after calling this method it - will be set to [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can - lead to target overloading. If you are resuming high-QPS - queues, follow the 500/50/5 pattern described in - [Managing Cloud Tasks Scaling - Risks](https://cloud.google.com/tasks/docs/manage-cloud-task-scaling). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetIamPolicy(self, request, context): - """Gets the access control policy for a [Queue][google.cloud.tasks.v2beta2.Queue]. - Returns an empty policy if the resource exists and does not have a policy - set. - - Authorization requires the following - [Google IAM](https://cloud.google.com/iam) permission on the specified - resource parent: - - * `cloudtasks.queues.getIamPolicy` - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def SetIamPolicy(self, request, context): - """Sets the access control policy for a [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM permissions yet. - Project-level permissions are required to use the Cloud Console. - - Authorization requires the following - [Google IAM](https://cloud.google.com/iam) permission on the specified - resource parent: - - * `cloudtasks.queues.setIamPolicy` - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def TestIamPermissions(self, request, context): - """Returns permissions that a caller has on a [Queue][google.cloud.tasks.v2beta2.Queue]. - If the resource does not exist, this will return an empty set of - permissions, not a [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building permission-aware - UIs and command-line tools, not for authorization checking. This operation - may "fail open" without warning. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTasks(self, request, context): - """Lists the tasks in a queue. - - By default, only the [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is retrieved - due to performance considerations; - [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] controls the - subset of information which is returned. - - The tasks may be returned in any order. The ordering may change at any - time. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetTask(self, request, context): - """Gets a task. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateTask(self, request, context): - """Creates a task and adds it to a queue. 
- - Tasks cannot be updated after creation; there is no UpdateTask command. - - * For [App Engine queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the maximum task size is - 100KB. - * For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the maximum task size is 1MB. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteTask(self, request, context): - """Deletes a task. - - A task can be deleted if it is scheduled or dispatched. A task - cannot be deleted if it has completed successfully or permanently - failed. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def LeaseTasks(self, request, context): - """Leases tasks from a pull queue for - [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. - - This method is invoked by the worker to obtain a lease. The - worker must acknowledge the task via - [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] after they have - performed the work associated with the task. - - The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is intended to store data that - the worker needs to perform the work associated with the task. To - return the payloads in the [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set - [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] to - [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. - - A maximum of 10 qps of [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - requests are allowed per - queue. [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] - is returned when this limit is - exceeded. [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] - is also returned when - [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] - is exceeded. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def AcknowledgeTask(self, request, context): - """Acknowledges a pull task. - - The worker, that is, the entity that - [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this task must call this method - to indicate that the work associated with the task has finished. - - The worker must acknowledge a task within the - [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] or the lease - will expire and the task will become available to be leased - again. After the task is acknowledged, it will not be returned - by a later [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], - [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or - [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def RenewLease(self, request, context): - """Renew the current lease of a pull task. - - The worker can use this method to extend the lease by a new - duration, starting from now. The new task lease will be - returned in the task's [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CancelLease(self, request, context): - """Cancel a pull task's lease. - - The worker can use this method to cancel a task's lease by - setting its [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] to now. This will - make the task available to be leased to the next caller of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def RunTask(self, request, context): - """Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, even if - the task is already running, the queue has reached its [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or - is [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. For - example, [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be used to retry a failed - task after a fix has been made or to manually force a task to be - dispatched now. - - The dispatched task is returned. That is, the task that is returned - contains the [status][google.cloud.tasks.v2beta2.Task.status] after the task is dispatched but - before the task is received by its target. - - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] will be reset to the time that - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was called plus the retry delay specified - in the queue's [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. - - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot be called on a - [pull task][google.cloud.tasks.v2beta2.PullMessage]. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_CloudTasksServicer_to_server(servicer, server): - rpc_method_handlers = { - "ListQueues": grpc.unary_unary_rpc_method_handler( - servicer.ListQueues, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.SerializeToString, - ), - "GetQueue": grpc.unary_unary_rpc_method_handler( - servicer.GetQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "CreateQueue": grpc.unary_unary_rpc_method_handler( - servicer.CreateQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "UpdateQueue": grpc.unary_unary_rpc_method_handler( - servicer.UpdateQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "DeleteQueue": grpc.unary_unary_rpc_method_handler( - servicer.DeleteQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "PurgeQueue": grpc.unary_unary_rpc_method_handler( - servicer.PurgeQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "PauseQueue": grpc.unary_unary_rpc_method_handler( - servicer.PauseQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "ResumeQueue": grpc.unary_unary_rpc_method_handler( - servicer.ResumeQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "GetIamPolicy": grpc.unary_unary_rpc_method_handler( - servicer.GetIamPolicy, - request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString, - response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, - ), - "SetIamPolicy": grpc.unary_unary_rpc_method_handler( - servicer.SetIamPolicy, - request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString, - response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, - ), - "TestIamPermissions": grpc.unary_unary_rpc_method_handler( - servicer.TestIamPermissions, - request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString, - response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString, 
- ), - "ListTasks": grpc.unary_unary_rpc_method_handler( - servicer.ListTasks, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.SerializeToString, - ), - "GetTask": grpc.unary_unary_rpc_method_handler( - servicer.GetTask, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.SerializeToString, - ), - "CreateTask": grpc.unary_unary_rpc_method_handler( - servicer.CreateTask, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.SerializeToString, - ), - "DeleteTask": grpc.unary_unary_rpc_method_handler( - servicer.DeleteTask, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "LeaseTasks": grpc.unary_unary_rpc_method_handler( - servicer.LeaseTasks, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.LeaseTasksRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.LeaseTasksResponse.SerializeToString, - ), - "AcknowledgeTask": grpc.unary_unary_rpc_method_handler( - servicer.AcknowledgeTask, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.AcknowledgeTaskRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "RenewLease": grpc.unary_unary_rpc_method_handler( - servicer.RenewLease, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.RenewLeaseRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.SerializeToString, - ), - "CancelLease": grpc.unary_unary_rpc_method_handler( - servicer.CancelLease, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CancelLeaseRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.SerializeToString, - ), - "RunTask": grpc.unary_unary_rpc_method_handler( - servicer.RunTask, - request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.tasks.v2beta2.CloudTasks", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. -class CloudTasks(object): - """Cloud Tasks allows developers to manage the execution of background - work in their applications. 
- """ - - @staticmethod - def ListQueues( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/ListQueues", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/GetQueue", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def PurgeQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.SerializeToString, - 
google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def PauseQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ResumeQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetIamPolicy( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy", - google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString, - google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def SetIamPolicy( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy", - google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString, - google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def TestIamPermissions( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions", - google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString, - google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTasks( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/ListTasks", - 
google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/GetTask", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/CreateTask", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def LeaseTasks( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.LeaseTasksRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.LeaseTasksResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def AcknowledgeTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.AcknowledgeTaskRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def RenewLease( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, 
- timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/RenewLease", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.RenewLeaseRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CancelLease( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/CancelLease", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CancelLeaseRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def RunTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta2.CloudTasks/RunTask", - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) diff --git a/google/cloud/tasks_v2beta2/proto/queue_pb2.py b/google/cloud/tasks_v2beta2/proto/queue_pb2.py deleted file mode 100644 index 8aeffd22..00000000 --- a/google/cloud/tasks_v2beta2/proto/queue_pb2.py +++ /dev/null @@ -1,801 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
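With the hand-written stubs and the experimental module-level helpers above removed, the same RPCs go through the generated client added under google/cloud/tasks_v2beta2/services/cloud_tasks. A minimal sketch of the ListQueues equivalent, assuming the package exports CloudTasksClient as the microgen surface does; the project and location are placeholders:

    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1"  # placeholder resource name

    # list_queues returns a pager that transparently follows page tokens.
    for queue in client.list_queues(parent=parent):
        print(queue.name, queue.state)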
-# source: google/cloud/tasks_v2beta2/proto/queue.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.tasks_v2beta2.proto import ( - target_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/tasks_v2beta2/proto/queue.proto", - package="google.cloud.tasks.v2beta2", - syntax="proto3", - serialized_options=b"\n\036com.google.cloud.tasks.v2beta2B\nQueueProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n,google/cloud/tasks_v2beta2/proto/queue.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x19google/api/resource.proto\x1a-google/cloud/tasks_v2beta2/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\xbf\x04\n\x05Queue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12Q\n\x16\x61pp_engine_http_target\x18\x03 \x01(\x0b\x32/.google.cloud.tasks.v2beta2.AppEngineHttpTargetH\x00\x12=\n\x0bpull_target\x18\x04 \x01(\x0b\x32&.google.cloud.tasks.v2beta2.PullTargetH\x00\x12;\n\x0brate_limits\x18\x05 \x01(\x0b\x32&.google.cloud.tasks.v2beta2.RateLimits\x12=\n\x0cretry_config\x18\x06 \x01(\x0b\x32\'.google.cloud.tasks.v2beta2.RetryConfig\x12\x36\n\x05state\x18\x07 \x01(\x0e\x32\'.google.cloud.tasks.v2beta2.Queue.State\x12.\n\npurge_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"E\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03:\\\xea\x41Y\n\x1f\x63loudtasks.googleapis.com/Queue\x12\x36projects/{project}/locations/{location}/queues/{queue}B\r\n\x0btarget_type"k\n\nRateLimits\x12\'\n\x1fmax_tasks_dispatched_per_second\x18\x01 \x01(\x01\x12\x16\n\x0emax_burst_size\x18\x02 \x01(\x05\x12\x1c\n\x14max_concurrent_tasks\x18\x03 \x01(\x05"\x81\x02\n\x0bRetryConfig\x12\x16\n\x0cmax_attempts\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12unlimited_attempts\x18\x02 \x01(\x08H\x00\x12\x35\n\x12max_retry_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12.\n\x0bmin_backoff\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12.\n\x0bmax_backoff\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x06 \x01(\x05\x42\x0e\n\x0cnum_attemptsBo\n\x1e\x63om.google.cloud.tasks.v2beta2B\nQueueProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasksb\x06proto3', - dependencies=[ - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_QUEUE_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.cloud.tasks.v2beta2.Queue.State", - filename=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PAUSED", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DISABLED", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=643, - serialized_end=712, -) -_sym_db.RegisterEnumDescriptor(_QUEUE_STATE) - - -_QUEUE = _descriptor.Descriptor( - name="Queue", - full_name="google.cloud.tasks.v2beta2.Queue", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta2.Queue.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="app_engine_http_target", - full_name="google.cloud.tasks.v2beta2.Queue.app_engine_http_target", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="pull_target", - full_name="google.cloud.tasks.v2beta2.Queue.pull_target", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="rate_limits", - full_name="google.cloud.tasks.v2beta2.Queue.rate_limits", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="retry_config", - full_name="google.cloud.tasks.v2beta2.Queue.retry_config", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.tasks.v2beta2.Queue.state", - index=5, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="purge_time", - full_name="google.cloud.tasks.v2beta2.Queue.purge_time", - index=6, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_QUEUE_STATE,], - serialized_options=b"\352AY\n\037cloudtasks.googleapis.com/Queue\0226projects/{project}/locations/{location}/queues/{queue}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="target_type", - full_name="google.cloud.tasks.v2beta2.Queue.target_type", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=246, - serialized_end=821, -) - - -_RATELIMITS = _descriptor.Descriptor( - name="RateLimits", - full_name="google.cloud.tasks.v2beta2.RateLimits", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="max_tasks_dispatched_per_second", - full_name="google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second", - index=0, - number=1, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_burst_size", - full_name="google.cloud.tasks.v2beta2.RateLimits.max_burst_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_concurrent_tasks", - full_name="google.cloud.tasks.v2beta2.RateLimits.max_concurrent_tasks", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=823, - serialized_end=930, -) - - -_RETRYCONFIG = _descriptor.Descriptor( - name="RetryConfig", - full_name="google.cloud.tasks.v2beta2.RetryConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="max_attempts", - full_name="google.cloud.tasks.v2beta2.RetryConfig.max_attempts", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="unlimited_attempts", - full_name="google.cloud.tasks.v2beta2.RetryConfig.unlimited_attempts", - index=1, - number=2, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_retry_duration", - full_name="google.cloud.tasks.v2beta2.RetryConfig.max_retry_duration", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="min_backoff", - full_name="google.cloud.tasks.v2beta2.RetryConfig.min_backoff", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_backoff", - full_name="google.cloud.tasks.v2beta2.RetryConfig.max_backoff", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_doublings", - full_name="google.cloud.tasks.v2beta2.RetryConfig.max_doublings", - index=5, - number=6, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="num_attempts", - full_name="google.cloud.tasks.v2beta2.RetryConfig.num_attempts", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=933, - serialized_end=1190, -) - -_QUEUE.fields_by_name[ - "app_engine_http_target" -].message_type = ( - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2._APPENGINEHTTPTARGET -) -_QUEUE.fields_by_name[ - "pull_target" -].message_type = ( - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2._PULLTARGET -) -_QUEUE.fields_by_name["rate_limits"].message_type = _RATELIMITS -_QUEUE.fields_by_name["retry_config"].message_type = _RETRYCONFIG -_QUEUE.fields_by_name["state"].enum_type = _QUEUE_STATE -_QUEUE.fields_by_name[ - "purge_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_QUEUE_STATE.containing_type = _QUEUE -_QUEUE.oneofs_by_name["target_type"].fields.append( - _QUEUE.fields_by_name["app_engine_http_target"] -) -_QUEUE.fields_by_name[ - "app_engine_http_target" -].containing_oneof = _QUEUE.oneofs_by_name["target_type"] 
-_QUEUE.oneofs_by_name["target_type"].fields.append(_QUEUE.fields_by_name["pull_target"]) -_QUEUE.fields_by_name["pull_target"].containing_oneof = _QUEUE.oneofs_by_name[ - "target_type" -] -_RETRYCONFIG.fields_by_name[ - "max_retry_duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_RETRYCONFIG.fields_by_name[ - "min_backoff" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_RETRYCONFIG.fields_by_name[ - "max_backoff" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_RETRYCONFIG.oneofs_by_name["num_attempts"].fields.append( - _RETRYCONFIG.fields_by_name["max_attempts"] -) -_RETRYCONFIG.fields_by_name[ - "max_attempts" -].containing_oneof = _RETRYCONFIG.oneofs_by_name["num_attempts"] -_RETRYCONFIG.oneofs_by_name["num_attempts"].fields.append( - _RETRYCONFIG.fields_by_name["unlimited_attempts"] -) -_RETRYCONFIG.fields_by_name[ - "unlimited_attempts" -].containing_oneof = _RETRYCONFIG.oneofs_by_name["num_attempts"] -DESCRIPTOR.message_types_by_name["Queue"] = _QUEUE -DESCRIPTOR.message_types_by_name["RateLimits"] = _RATELIMITS -DESCRIPTOR.message_types_by_name["RetryConfig"] = _RETRYCONFIG -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Queue = _reflection.GeneratedProtocolMessageType( - "Queue", - (_message.Message,), - { - "DESCRIPTOR": _QUEUE, - "__module__": "google.cloud.tasks_v2beta2.proto.queue_pb2", - "__doc__": """A queue is a container of related tasks. Queues are configured to - manage how those tasks are dispatched. Configurable properties include - rate limits, retry options, target types, and others. - - Attributes: - name: - Caller-specified and required in [CreateQueue][google.cloud.ta - sks.v2beta2.CloudTasks.CreateQueue], after which it becomes - output only. The queue name. The queue name must have the - following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), colons (:), or periods (.). For more - information, see `Identifying projects - `_ - ``LOCATION_ID`` - is the canonical ID for the queue’s location. The list of - available locations can be obtained by calling [ListLocatio - ns][google.cloud.location.Locations.ListLocations]. For - more information, see - https://cloud.google.com/about/locations/. - ``QUEUE_ID`` can - contain letters ([A-Za-z]), numbers ([0-9]), or hyphens - (-). The maximum length is 100 characters. - target_type: - Caller-specified and required in [CreateQueue][google.cloud.ta - sks.v2beta2.CloudTasks.CreateQueue][], after which the queue - config type becomes output only, though fields within the - config are mutable. The queue’s target. The target applies - to all tasks in the queue. - app_engine_http_target: - App Engine HTTP target. An App Engine queue is a queue that - has an [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEng - ineHttpTarget]. - pull_target: - Pull target. A pull queue is a queue that has a - [PullTarget][google.cloud.tasks.v2beta2.PullTarget]. - rate_limits: - Rate limits for task dispatches. - [rate_limits][google.cloud.tasks.v2beta2.Queue.rate_limits] - and - [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] - are related because they both control task attempts however - they control how tasks are attempted in different ways: - - [rate_limits][google.cloud.tasks.v2beta2.Queue.rate_limits] - controls the total rate of dispatches from a queue - (i.e. 
all traffic dispatched from the queue, regardless of - whether the dispatch is from a first attempt or a retry). - - [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] - controls what happens to particular a task after its first - attempt fails. That is, - [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] - controls task retries (the second attempt, third attempt, - etc). - retry_config: - Settings that determine the retry behavior. - For tasks - created using Cloud Tasks: the queue-level retry settings - apply to all tasks in the queue that were created using Cloud - Tasks. Retry settings cannot be set on individual tasks. - - For tasks created using the App Engine SDK: the queue-level - retry settings apply to all tasks in the queue which do not - have retry settings explicitly set on the task and were - created by the App Engine SDK. See `App Engine - documentation `_. - state: - Output only. The state of the queue. ``state`` can only be - changed by called [PauseQueue][google.cloud.tasks.v2beta2.Clou - dTasks.PauseQueue], [ResumeQueue][google.cloud.tasks.v2beta2.C - loudTasks.ResumeQueue], or uploading `queue.yaml/xml `_. [U - pdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] - cannot be used to change ``state``. - purge_time: - Output only. The last time this queue was purged. All tasks - that were - [created][google.cloud.tasks.v2beta2.Task.create_time] before - this time were purged. A queue can be purged using [PurgeQueu - e][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue], the `App - Engine Task Queue SDK, or the Cloud Console `_. Purge - time will be truncated to the nearest microsecond. Purge time - will be unset if the queue has never been purged. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.Queue) - }, -) -_sym_db.RegisterMessage(Queue) - -RateLimits = _reflection.GeneratedProtocolMessageType( - "RateLimits", - (_message.Message,), - { - "DESCRIPTOR": _RATELIMITS, - "__module__": "google.cloud.tasks_v2beta2.proto.queue_pb2", - "__doc__": """Rate limits. This message determines the maximum rate that tasks can - be dispatched by a queue, regardless of whether the dispatch is a - first task attempt or a retry. Note: The debugging command, - [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask], will run a - task even if the queue has reached its - [RateLimits][google.cloud.tasks.v2beta2.RateLimits]. - - Attributes: - max_tasks_dispatched_per_second: - The maximum rate at which tasks are dispatched from this - queue. If unspecified when the queue is created, Cloud Tasks - will pick the default. - For [App Engine - queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the - maximum allowed value is 500. - This field is output only - for [pull queues][google.cloud.tasks.v2beta2.PullTarget]. - In addition to the ``max_tasks_dispatched_per_second`` - limit, a maximum of 10 QPS of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - requests are allowed per pull queue. This field has the same - meaning as `rate in queue.yaml/xml `_. - max_burst_size: - Output only. The max burst size. Max burst size limits how - fast tasks in queue are processed when many tasks are in the - queue and the rate is high. This field allows the queue to - have a high rate so processing starts shortly after a task is - enqueued, but still limits resource usage when many tasks are - enqueued in a short period of time. The `token bucket - `_ algorithm is used - to control the rate of task dispatches. 
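The Queue docstring above spells out the projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID name format. A small sketch using the queue_path helper exposed on the generated client for exactly this format; the IDs are placeholders:

    from google.cloud import tasks_v2beta2

    # Static helper on the generated client; no credentials are needed to call it.
    name = tasks_v2beta2.CloudTasksClient.queue_path("my-project", "us-central1", "my-queue")
    # -> "projects/my-project/locations/us-central1/queues/my-queue"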
Each queue has a token - bucket that holds tokens, up to the maximum specified by - ``max_burst_size``. Each time a task is dispatched, a token is - removed from the bucket. Tasks will be dispatched until the - queue’s bucket runs out of tokens. The bucket will be - continuously refilled with new tokens based on [max_tasks_disp - atched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_t - asks_dispatched_per_second]. Cloud Tasks will pick the value - of ``max_burst_size`` based on the value of [max_tasks_dispatc - hed_per_second][google.cloud.tasks.v2beta2.RateLimits.max_task - s_dispatched_per_second]. For App Engine queues that were - created or updated using ``queue.yaml/xml``, - ``max_burst_size`` is equal to `bucket_size `_. Since ``max_burst_size`` is output only, if [UpdateQ - ueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] is - called on a queue created by ``queue.yaml/xml``, - ``max_burst_size`` will be reset based on the value of [max_ta - sks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimi - ts.max_tasks_dispatched_per_second], regardless of whether [ma - x_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.Rate - Limits.max_tasks_dispatched_per_second] is updated. - max_concurrent_tasks: - The maximum number of concurrent tasks that Cloud Tasks allows - to be dispatched for this queue. After this threshold has been - reached, Cloud Tasks stops dispatching tasks until the number - of concurrent requests decreases. If unspecified when the - queue is created, Cloud Tasks will pick the default. The - maximum allowed value is 5,000. This field is output only for - [pull queues][google.cloud.tasks.v2beta2.PullTarget] and - always -1, which indicates no limit. No other queue types can - have ``max_concurrent_tasks`` set to -1. This field has the - same meaning as `max_concurrent_requests in queue.yaml/xml `_. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.RateLimits) - }, -) -_sym_db.RegisterMessage(RateLimits) - -RetryConfig = _reflection.GeneratedProtocolMessageType( - "RetryConfig", - (_message.Message,), - { - "DESCRIPTOR": _RETRYCONFIG, - "__module__": "google.cloud.tasks_v2beta2.proto.queue_pb2", - "__doc__": """Retry config. These settings determine how a failed task attempt is - retried. - - Attributes: - num_attempts: - Number of attempts per task. If unspecified when the queue is - created, Cloud Tasks will pick the default. This field has - the same meaning as `task_retry_limit in queue.yaml/xml `_. - max_attempts: - The maximum number of attempts for a task. Cloud Tasks will - attempt the task ``max_attempts`` times (that is, if the first - attempt fails, then there will be ``max_attempts - 1`` - retries). Must be > 0. - unlimited_attempts: - If true, then the number of attempts is unlimited. - max_retry_duration: - If positive, ``max_retry_duration`` specifies the time limit - for retrying a failed task, measured from when the task was - first attempted. Once ``max_retry_duration`` time has passed - *and* the task has been attempted [max_attempts][google.cloud. - tasks.v2beta2.RetryConfig.max_attempts] times, no further - attempts will be made and the task will be deleted. If zero, - then the task age is unlimited. If unspecified when the queue - is created, Cloud Tasks will pick the default. This field is - output only for [pull - queues][google.cloud.tasks.v2beta2.PullTarget]. - ``max_retry_duration`` will be truncated to the nearest - second. 
This field has the same meaning as `task_age_limit in - queue.yaml/xml `_. - min_backoff: - A task will be - [scheduled][google.cloud.tasks.v2beta2.Task.schedule_time] for - retry between [min_backoff][google.cloud.tasks.v2beta2.RetryCo - nfig.min_backoff] and [max_backoff][google.cloud.tasks.v2beta2 - .RetryConfig.max_backoff] duration after it fails, if the - queue’s [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig] - specifies that the task should be retried. If unspecified - when the queue is created, Cloud Tasks will pick the default. - This field is output only for [pull - queues][google.cloud.tasks.v2beta2.PullTarget]. - ``min_backoff`` will be truncated to the nearest second. This - field has the same meaning as `min_backoff_seconds in - queue.yaml/xml `_. - max_backoff: - A task will be - [scheduled][google.cloud.tasks.v2beta2.Task.schedule_time] for - retry between [min_backoff][google.cloud.tasks.v2beta2.RetryCo - nfig.min_backoff] and [max_backoff][google.cloud.tasks.v2beta2 - .RetryConfig.max_backoff] duration after it fails, if the - queue’s [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig] - specifies that the task should be retried. If unspecified - when the queue is created, Cloud Tasks will pick the default. - This field is output only for [pull - queues][google.cloud.tasks.v2beta2.PullTarget]. - ``max_backoff`` will be truncated to the nearest second. This - field has the same meaning as `max_backoff_seconds in - queue.yaml/xml `_. - max_doublings: - The time between retries will double ``max_doublings`` times. - A task’s retry interval starts at [min_backoff][google.cloud.t - asks.v2beta2.RetryConfig.min_backoff], then doubles - ``max_doublings`` times, then increases linearly, and finally - retries retries at intervals of [max_backoff][google.cloud.tas - ks.v2beta2.RetryConfig.max_backoff] up to [max_attempts][googl - e.cloud.tasks.v2beta2.RetryConfig.max_attempts] times. For - example, if [min_backoff][google.cloud.tasks.v2beta2.RetryConf - ig.min_backoff] is 10s, [max_backoff][google.cloud.tasks.v2bet - a2.RetryConfig.max_backoff] is 300s, and ``max_doublings`` is - 3, then the a task will first be retried in 10s. The retry - interval will double three times, and then increase linearly - by 2^3 \* 10s. Finally, the task will retry at intervals of [m - ax_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff - ] until the task has been attempted [max_attempts][google.clou - d.tasks.v2beta2.RetryConfig.max_attempts] times. Thus, the - requests will retry at 10s, 20s, 40s, 80s, 160s, 240s, 300s, - 300s, …. If unspecified when the queue is created, Cloud - Tasks will pick the default. This field is output only for - [pull queues][google.cloud.tasks.v2beta2.PullTarget]. This - field has the same meaning as `max_doublings in queue.yaml/xml - `_. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.RetryConfig) - }, -) -_sym_db.RegisterMessage(RetryConfig) - - -DESCRIPTOR._options = None -_QUEUE._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/tasks_v2beta2/proto/queue_pb2_grpc.py b/google/cloud/tasks_v2beta2/proto/queue_pb2_grpc.py deleted file mode 100644 index 8a939394..00000000 --- a/google/cloud/tasks_v2beta2/proto/queue_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
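Pulling together the Queue, RateLimits, and RetryConfig fields documented in the deleted module above, a hedged sketch that builds a queue configuration with the proto-plus types this PR introduces and pushes it with the flattened update_queue call; the resource name and numeric values are placeholders:

    from google.cloud import tasks_v2beta2
    from google.protobuf import duration_pb2

    queue = tasks_v2beta2.Queue(
        name="projects/my-project/locations/us-central1/queues/my-queue",
        rate_limits=tasks_v2beta2.RateLimits(
            max_tasks_dispatched_per_second=10.0,
            max_concurrent_tasks=50,
        ),
        retry_config=tasks_v2beta2.RetryConfig(
            max_attempts=5,
            min_backoff=duration_pb2.Duration(seconds=10),
            max_backoff=duration_pb2.Duration(seconds=300),
            max_doublings=3,
        ),
    )
    # With min_backoff=10s, max_backoff=300s and max_doublings=3, retries are spaced
    # roughly 10s, 20s, 40s, 80s, 160s, 240s, 300s, 300s, ... per the max_doublings
    # description above.

    client = tasks_v2beta2.CloudTasksClient()
    client.update_queue(queue=queue)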
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/tasks_v2beta2/proto/target_pb2.py b/google/cloud/tasks_v2beta2/proto/target_pb2.py deleted file mode 100644 index cb7365b3..00000000 --- a/google/cloud/tasks_v2beta2/proto/target_pb2.py +++ /dev/null @@ -1,865 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/tasks_v2beta2/proto/target.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/tasks_v2beta2/proto/target.proto", - package="google.cloud.tasks.v2beta2", - syntax="proto3", - serialized_options=b"\n\036com.google.cloud.tasks.v2beta2B\013TargetProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n-google/cloud/tasks_v2beta2/proto/target.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x1cgoogle/api/annotations.proto"\x0c\n\nPullTarget"+\n\x0bPullMessage\x12\x0f\n\x07payload\x18\x01 \x01(\x0c\x12\x0b\n\x03tag\x18\x02 \x01(\t"h\n\x13\x41ppEngineHttpTarget\x12Q\n\x1b\x61pp_engine_routing_override\x18\x01 \x01(\x0b\x32,.google.cloud.tasks.v2beta2.AppEngineRouting"\xc4\x02\n\x14\x41ppEngineHttpRequest\x12;\n\x0bhttp_method\x18\x01 \x01(\x0e\x32&.google.cloud.tasks.v2beta2.HttpMethod\x12H\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32,.google.cloud.tasks.v2beta2.AppEngineRouting\x12\x14\n\x0crelative_url\x18\x03 \x01(\t\x12N\n\x07headers\x18\x04 \x03(\x0b\x32=.google.cloud.tasks.v2beta2.AppEngineHttpRequest.HeadersEntry\x12\x0f\n\x07payload\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t*[\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x42p\n\x1e\x63om.google.cloud.tasks.v2beta2B\x0bTargetProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasksb\x06proto3', - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], -) - -_HTTPMETHOD = _descriptor.EnumDescriptor( - name="HttpMethod", - full_name="google.cloud.tasks.v2beta2.HttpMethod", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="HTTP_METHOD_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="POST", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GET", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - 
name="HEAD", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PUT", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DELETE", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=685, - serialized_end=776, -) -_sym_db.RegisterEnumDescriptor(_HTTPMETHOD) - -HttpMethod = enum_type_wrapper.EnumTypeWrapper(_HTTPMETHOD) -HTTP_METHOD_UNSPECIFIED = 0 -POST = 1 -GET = 2 -HEAD = 3 -PUT = 4 -DELETE = 5 - - -_PULLTARGET = _descriptor.Descriptor( - name="PullTarget", - full_name="google.cloud.tasks.v2beta2.PullTarget", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=107, - serialized_end=119, -) - - -_PULLMESSAGE = _descriptor.Descriptor( - name="PullMessage", - full_name="google.cloud.tasks.v2beta2.PullMessage", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="payload", - full_name="google.cloud.tasks.v2beta2.PullMessage.payload", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="tag", - full_name="google.cloud.tasks.v2beta2.PullMessage.tag", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=121, - serialized_end=164, -) - - -_APPENGINEHTTPTARGET = _descriptor.Descriptor( - name="AppEngineHttpTarget", - full_name="google.cloud.tasks.v2beta2.AppEngineHttpTarget", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="app_engine_routing_override", - full_name="google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_override", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=166, - serialized_end=270, -) - - -_APPENGINEHTTPREQUEST_HEADERSENTRY = _descriptor.Descriptor( - name="HeadersEntry", - 
full_name="google.cloud.tasks.v2beta2.AppEngineHttpRequest.HeadersEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.tasks.v2beta2.AppEngineHttpRequest.HeadersEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.tasks.v2beta2.AppEngineHttpRequest.HeadersEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=551, - serialized_end=597, -) - -_APPENGINEHTTPREQUEST = _descriptor.Descriptor( - name="AppEngineHttpRequest", - full_name="google.cloud.tasks.v2beta2.AppEngineHttpRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="http_method", - full_name="google.cloud.tasks.v2beta2.AppEngineHttpRequest.http_method", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="app_engine_routing", - full_name="google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="relative_url", - full_name="google.cloud.tasks.v2beta2.AppEngineHttpRequest.relative_url", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="headers", - full_name="google.cloud.tasks.v2beta2.AppEngineHttpRequest.headers", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="payload", - full_name="google.cloud.tasks.v2beta2.AppEngineHttpRequest.payload", - index=4, - number=5, - type=12, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_APPENGINEHTTPREQUEST_HEADERSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=273, - serialized_end=597, -) - - -_APPENGINEROUTING = _descriptor.Descriptor( - name="AppEngineRouting", - full_name="google.cloud.tasks.v2beta2.AppEngineRouting", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="service", - full_name="google.cloud.tasks.v2beta2.AppEngineRouting.service", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.cloud.tasks.v2beta2.AppEngineRouting.version", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="instance", - full_name="google.cloud.tasks.v2beta2.AppEngineRouting.instance", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="host", - full_name="google.cloud.tasks.v2beta2.AppEngineRouting.host", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=599, - serialized_end=683, -) - -_APPENGINEHTTPTARGET.fields_by_name[ - "app_engine_routing_override" -].message_type = _APPENGINEROUTING -_APPENGINEHTTPREQUEST_HEADERSENTRY.containing_type = _APPENGINEHTTPREQUEST -_APPENGINEHTTPREQUEST.fields_by_name["http_method"].enum_type = _HTTPMETHOD -_APPENGINEHTTPREQUEST.fields_by_name[ - "app_engine_routing" -].message_type = _APPENGINEROUTING -_APPENGINEHTTPREQUEST.fields_by_name[ - "headers" -].message_type = _APPENGINEHTTPREQUEST_HEADERSENTRY -DESCRIPTOR.message_types_by_name["PullTarget"] = _PULLTARGET -DESCRIPTOR.message_types_by_name["PullMessage"] = _PULLMESSAGE -DESCRIPTOR.message_types_by_name["AppEngineHttpTarget"] = _APPENGINEHTTPTARGET -DESCRIPTOR.message_types_by_name["AppEngineHttpRequest"] = _APPENGINEHTTPREQUEST -DESCRIPTOR.message_types_by_name["AppEngineRouting"] = 
_APPENGINEROUTING -DESCRIPTOR.enum_types_by_name["HttpMethod"] = _HTTPMETHOD -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -PullTarget = _reflection.GeneratedProtocolMessageType( - "PullTarget", - (_message.Message,), - { - "DESCRIPTOR": _PULLTARGET, - "__module__": "google.cloud.tasks_v2beta2.proto.target_pb2", - "__doc__": """Pull target.""", - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.PullTarget) - }, -) -_sym_db.RegisterMessage(PullTarget) - -PullMessage = _reflection.GeneratedProtocolMessageType( - "PullMessage", - (_message.Message,), - { - "DESCRIPTOR": _PULLMESSAGE, - "__module__": "google.cloud.tasks_v2beta2.proto.target_pb2", - "__doc__": """The pull message contains data that can be used by the caller of - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] to - process the task. This proto can only be used for tasks in a queue - which has [pull_target][google.cloud.tasks.v2beta2.Queue.pull_target] - set. - - Attributes: - payload: - A data payload consumed by the worker to execute the task. - tag: - The task’s tag. Tags allow similar tasks to be processed in a - batch. If you label tasks with a tag, your worker can [lease - tasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] with - the same tag using - [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter]. - For example, if you want to aggregate the events associated - with a specific user once a day, you could tag tasks with the - user ID. The task’s tag can only be set when the [task is - created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. - The tag must be less than 500 characters. SDK compatibility: - Although the SDK allows tags to be either string or `bytes `_, only UTF-8 encoded tags can be used in Cloud - Tasks. If a tag isn’t UTF-8 encoded, the tag will be empty - when the task is returned by Cloud Tasks. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.PullMessage) - }, -) -_sym_db.RegisterMessage(PullMessage) - -AppEngineHttpTarget = _reflection.GeneratedProtocolMessageType( - "AppEngineHttpTarget", - (_message.Message,), - { - "DESCRIPTOR": _APPENGINEHTTPTARGET, - "__module__": "google.cloud.tasks_v2beta2.proto.target_pb2", - "__doc__": """App Engine HTTP target. The task will be delivered to the App Engine - application hostname specified by its - [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] - and [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpReq - uest]. The documentation for [AppEngineHttpRequest][google.cloud.tasks - .v2beta2.AppEngineHttpRequest] explains how the task’s host URL is - constructed. Using - [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] - requires ```appengine.applications.get`` - `\_ - Google IAM permission for the project and the following scope: - ``https://www.googleapis.com/auth/cloud-platform`` - - Attributes: - app_engine_routing_override: - Overrides for the [task-level app_engine_routing][google.cloud - .tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. If - set, ``app_engine_routing_override`` is used for all tasks in - the queue, no matter what the setting is for the [task-level a - pp_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpReq - uest.app_engine_routing]. 
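The PullMessage docstring above describes the pull-queue flow: a task carries a payload and an optional tag, and workers lease tasks (optionally restricted by tag) before processing and acknowledging them. A hedged sketch of that round trip with the new client; the queue name, payload, tag, and lease settings are all placeholders:

    from google.cloud import tasks_v2beta2
    from google.protobuf import duration_pb2

    client = tasks_v2beta2.CloudTasksClient()
    queue = "projects/my-project/locations/us-central1/queues/my-pull-queue"

    # Create a task carrying a PullMessage.
    client.create_task(
        parent=queue,
        task=tasks_v2beta2.Task(
            pull_message=tasks_v2beta2.PullMessage(payload=b"work item", tag="user-123")
        ),
    )

    # Lease up to 10 tasks for 10 minutes; a request-level filter such as
    # tag=<my-tag> (see the docstring above) can restrict the lease to one tag.
    lease = client.lease_tasks(
        request={
            "parent": queue,
            "max_tasks": 10,
            "lease_duration": duration_pb2.Duration(seconds=600),
        }
    )
    for task in lease.tasks:
        pass  # process the payload, then call acknowledge_task(name=..., schedule_time=...)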
- """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AppEngineHttpTarget) - }, -) -_sym_db.RegisterMessage(AppEngineHttpTarget) - -AppEngineHttpRequest = _reflection.GeneratedProtocolMessageType( - "AppEngineHttpRequest", - (_message.Message,), - { - "HeadersEntry": _reflection.GeneratedProtocolMessageType( - "HeadersEntry", - (_message.Message,), - { - "DESCRIPTOR": _APPENGINEHTTPREQUEST_HEADERSENTRY, - "__module__": "google.cloud.tasks_v2beta2.proto.target_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AppEngineHttpRequest.HeadersEntry) - }, - ), - "DESCRIPTOR": _APPENGINEHTTPREQUEST, - "__module__": "google.cloud.tasks_v2beta2.proto.target_pb2", - "__doc__": """App Engine HTTP request. The message defines the HTTP request that is - sent to an App Engine app when the task is dispatched. This proto can - only be used for tasks in a queue which has [app_engine_http_target][g - oogle.cloud.tasks.v2beta2.Queue.app_engine_http_target] set. Using [A - ppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] - requires ```appengine.applications.get`` - `\_ - Google IAM permission for the project and the following scope: - ``https://www.googleapis.com/auth/cloud-platform`` The task will be - delivered to the App Engine app which belongs to the same project as - the queue. For more information, see `How Requests are Routed - `_ and how routing is affected by `dispatch files `_. - Traffic is encrypted during transport and never leaves Google - datacenters. Because this traffic is carried over a communication - mechanism internal to Google, you cannot explicitly set the protocol - (for example, HTTP or HTTPS). The request to the handler, however, - will appear to have used the HTTP protocol. The - [AppEngineRouting][google.cloud.tasks.v2beta2.AppEngineRouting] used - to construct the URL that the task is delivered to can be set at the - queue-level or task-level: - If set, [app_engine_routing_override - ][google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_ov - erride] is used for all tasks in the queue, no matter what the - setting is for the [task-level app_engine_routing][google.cloud. - tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. The ``url`` - that the task will be sent to is: - ``url =`` - [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] ``+`` [ - relative_url][google.cloud.tasks.v2beta2.AppEngineHttpRequest.relative - _url] Tasks can be dispatched to secure app handlers, unsecure app - handlers, and URIs restricted with ```login: admin`` `_. Because - tasks are not run as any user, they cannot be dispatched to URIs - restricted with ```login: required`` `_ Task dispatches also do not - follow redirects. The task attempt has succeeded if the app’s request - handler returns an HTTP response code in the range [``200`` - - ``299``]. The task attempt has failed if the app’s handler returns a - non-2xx response code or Cloud Tasks does not receive response before - the [deadline][Task.dispatch_deadline]. Failed tasks will be retried - according to the [retry - configuration][google.cloud.tasks.v2beta2.Queue.retry_config]. ``503`` - (Service Unavailable) is considered an App Engine system error instead - of an application error and will cause Cloud Tasks’ traffic congestion - control to temporarily throttle the queue’s dispatches. Unlike other - types of task targets, a ``429`` (Too Many Requests) response from an - app handler does not cause traffic congestion control to throttle the - queue. 
- - Attributes: - http_method: - The HTTP method to use for the request. The default is POST. - The app’s request handler for the task’s target URL must be - able to handle HTTP requests with this http_method, otherwise - the task attempt will fail with error code 405 (Method Not - Allowed). See `Writing a push task request handler `_ and the - documentation for the request handlers in the language your - app is written in e.g. `Python Request Handler `_. - app_engine_routing: - Task-level setting for App Engine routing. If set, [app_engin - e_routing_override][google.cloud.tasks.v2beta2.AppEngineHttpTa - rget.app_engine_routing_override] is used for all tasks in the - queue, no matter what the setting is for the [task-level app_e - ngine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest - .app_engine_routing]. - relative_url: - The relative URL. The relative URL must begin with “/” and - must be a valid HTTP relative URL. It can contain a path and - query string arguments. If the relative URL is empty, then the - root path “/” will be used. No spaces are allowed, and the - maximum length allowed is 2083 characters. - headers: - HTTP request headers. This map contains the header field - names and values. Headers can be set when the [task is - created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. - Repeated headers are not supported but a header value can - contain commas. Cloud Tasks sets some headers to default - values: - ``User-Agent``: By default, this header is - ``"AppEngine-Google; (+http://code.google.com/appengine)"``. - This header can be modified, but Cloud Tasks will append - ``"AppEngine-Google; (+http://code.google.com/appengine)"`` to - the modified ``User-Agent``. If the task has a [payload][g - oogle.cloud.tasks.v2beta2.AppEngineHttpRequest.payload], Cloud - Tasks sets the following headers: - ``Content-Type``: By - default, the ``Content-Type`` header is set to - ``"application/octet-stream"``. The default can be overridden - by explicitly setting ``Content-Type`` to a particular - media type when the [task is - created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. - For example, ``Content-Type`` can be set to - ``"application/json"``. - ``Content-Length``: This is - computed by Cloud Tasks. This value is output only. It - cannot be changed. The headers below cannot be set or - overridden: - ``Host`` - ``X-Google-*`` - - ``X-AppEngine-*`` In addition, Cloud Tasks sets some headers - when the task is dispatched, such as headers containing - information about the task; see `request headers `_. These headers are set - only when the task is dispatched, so they are not visible when - the task is returned in a Cloud Tasks response. Although - there is no specific limit for the maximum number of headers - or the size, there is a limit on the maximum size of the - [Task][google.cloud.tasks.v2beta2.Task]. For more information, - see the - [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] - documentation. - payload: - Payload. The payload will be sent as the HTTP message body. A - message body, and thus a payload, is allowed only if the HTTP - method is POST or PUT. It is an error to set a data payload on - a task with an incompatible - [HttpMethod][google.cloud.tasks.v2beta2.HttpMethod]. 
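The ``AppEngineHttpRequest``, ``AppEngineRouting`` and ``HttpMethod`` definitions removed above are regenerated as proto-plus classes in ``google.cloud.tasks_v2beta2.types`` (imported as ``target`` by the new client module later in this patch). A minimal sketch, assuming the field and enum names carry over from the proto unchanged and using placeholder values:

    # Sketch only: builds the same messages with the new proto-plus types instead
    # of the deleted target_pb2 module. Field names (http_method, relative_url,
    # headers, app_engine_routing, service) are assumed to match the proto above.
    from google.cloud.tasks_v2beta2.types import target

    routing = target.AppEngineRouting(service="worker")  # placeholder service name
    request = target.AppEngineHttpRequest(
        http_method=target.HttpMethod.POST,
        relative_url="/process",  # must begin with "/"
        headers={"Content-Type": "application/json"},
        app_engine_routing=routing,
    )
    print(request)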
- """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AppEngineHttpRequest) - }, -) -_sym_db.RegisterMessage(AppEngineHttpRequest) -_sym_db.RegisterMessage(AppEngineHttpRequest.HeadersEntry) - -AppEngineRouting = _reflection.GeneratedProtocolMessageType( - "AppEngineRouting", - (_message.Message,), - { - "DESCRIPTOR": _APPENGINEROUTING, - "__module__": "google.cloud.tasks_v2beta2.proto.target_pb2", - "__doc__": """App Engine Routing. Defines routing characteristics specific to App - Engine - service, version, and instance. For more information about - services, versions, and instances see `An Overview of App Engine - `_, `Microservices Architecture on Google App Engine - `_, `App Engine Standard request routing - `_, and `App Engine Flex request routing - `_. - - Attributes: - service: - App service. By default, the task is sent to the service - which is the default service when the task is attempted. For - some queues or tasks which were created using the App Engine - Task Queue API, - [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is - not parsable into [service][google.cloud.tasks.v2beta2.AppEngi - neRouting.service], [version][google.cloud.tasks.v2beta2.AppEn - gineRouting.version], and [instance][google.cloud.tasks.v2beta - 2.AppEngineRouting.instance]. For example, some tasks which - were created using the App Engine SDK use a custom domain - name; custom domains are not parsed by Cloud Tasks. If - [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is - not parsable, then [service][google.cloud.tasks.v2beta2.AppEng - ineRouting.service], [version][google.cloud.tasks.v2beta2.AppE - ngineRouting.version], and [instance][google.cloud.tasks.v2bet - a2.AppEngineRouting.instance] are the empty string. - version: - App version. By default, the task is sent to the version - which is the default version when the task is attempted. For - some queues or tasks which were created using the App Engine - Task Queue API, - [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is - not parsable into [service][google.cloud.tasks.v2beta2.AppEngi - neRouting.service], [version][google.cloud.tasks.v2beta2.AppEn - gineRouting.version], and [instance][google.cloud.tasks.v2beta - 2.AppEngineRouting.instance]. For example, some tasks which - were created using the App Engine SDK use a custom domain - name; custom domains are not parsed by Cloud Tasks. If - [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is - not parsable, then [service][google.cloud.tasks.v2beta2.AppEng - ineRouting.service], [version][google.cloud.tasks.v2beta2.AppE - ngineRouting.version], and [instance][google.cloud.tasks.v2bet - a2.AppEngineRouting.instance] are the empty string. - instance: - App instance. By default, the task is sent to an instance - which is available when the task is attempted. Requests can - only be sent to a specific instance if `manual scaling is used - in App Engine Standard - `_. - App Engine Flex does not support instances. For more - information, see `App Engine Standard request routing - `_ and `App Engine Flex request routing - `_. - host: - Output only. The host that the task is sent to. For more - information, see `How Requests are Routed - `_. The host is constructed as: - - ``host = [application_domain_name]``\ ``| [service] + '.' - + [application_domain_name]``\ ``| [version] + '.' + - [application_domain_name]``\ ``| [version_dot_service]+ - '.' + [application_domain_name]``\ ``| [instance] + '.' 
+ - [application_domain_name]``\ ``| [instance_dot_service] + - '.' + [application_domain_name]``\ ``| - [instance_dot_version] + '.' + [application_domain_name]``\ - ``| [instance_dot_version_dot_service] + '.' + - [application_domain_name]`` - ``application_domain_name`` = - The domain name of the app, for example .appspot.com, which - is associated with the queue’s project ID. Some tasks which - were created using the App Engine SDK use a custom domain - name. - ``service =`` - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] - - ``version =`` - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] - - ``version_dot_service =`` - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] - ``+ '.' +`` - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] - - ``instance =`` [instance][google.cloud.tasks.v2beta2.App - EngineRouting.instance] - ``instance_dot_service =`` [ins - tance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] - ``+ '.' +`` - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] - - ``instance_dot_version =`` [instance][google.cloud.tasks - .v2beta2.AppEngineRouting.instance] ``+ '.' +`` - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] - - ``instance_dot_version_dot_service =`` [instance][google - .cloud.tasks.v2beta2.AppEngineRouting.instance] ``+ '.' +`` - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] - ``+ '.' +`` - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] - If - [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] - is empty, then the task will be sent to the service which is - the default service when the task is attempted. If - [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] - is empty, then the task will be sent to the version which is - the default version when the task is attempted. If [instance] - [google.cloud.tasks.v2beta2.AppEngineRouting.instance] is - empty, then the task will be sent to an instance which is - available when the task is attempted. If [service][google.clo - ud.tasks.v2beta2.AppEngineRouting.service], [version][google.c - loud.tasks.v2beta2.AppEngineRouting.version], or [instance][go - ogle.cloud.tasks.v2beta2.AppEngineRouting.instance] is - invalid, then the task will be sent to the default version of - the default service when the task is attempted. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AppEngineRouting) - }, -) -_sym_db.RegisterMessage(AppEngineRouting) - - -DESCRIPTOR._options = None -_APPENGINEHTTPREQUEST_HEADERSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/tasks_v2beta2/proto/target_pb2_grpc.py b/google/cloud/tasks_v2beta2/proto/target_pb2_grpc.py deleted file mode 100644 index 8a939394..00000000 --- a/google/cloud/tasks_v2beta2/proto/target_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/tasks_v2beta2/proto/task_pb2.py b/google/cloud/tasks_v2beta2/proto/task_pb2.py deleted file mode 100644 index 8a1680ed..00000000 --- a/google/cloud/tasks_v2beta2/proto/task_pb2.py +++ /dev/null @@ -1,626 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/tasks_v2beta2/proto/task.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.tasks_v2beta2.proto import ( - target_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2, -) -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/tasks_v2beta2/proto/task.proto", - package="google.cloud.tasks.v2beta2", - syntax="proto3", - serialized_options=b"\n\036com.google.cloud.tasks.v2beta2B\tTaskProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n+google/cloud/tasks_v2beta2/proto/task.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x19google/api/resource.proto\x1a-google/cloud/tasks_v2beta2/proto/target.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xa8\x04\n\x04Task\x12\x0c\n\x04name\x18\x01 \x01(\t\x12S\n\x17\x61pp_engine_http_request\x18\x03 \x01(\x0b\x32\x30.google.cloud.tasks.v2beta2.AppEngineHttpRequestH\x00\x12?\n\x0cpull_message\x18\x04 \x01(\x0b\x32\'.google.cloud.tasks.v2beta2.PullMessageH\x00\x12\x31\n\rschedule_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06status\x18\x07 \x01(\x0b\x32&.google.cloud.tasks.v2beta2.TaskStatus\x12\x33\n\x04view\x18\x08 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"1\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x08\n\x04\x46ULL\x10\x02:h\xea\x41\x65\n\x1e\x63loudtasks.googleapis.com/Task\x12\x43projects/{project}/locations/{location}/queues/{queue}/tasks/{task}B\x0e\n\x0cpayload_type"\xdd\x01\n\nTaskStatus\x12\x1e\n\x16\x61ttempt_dispatch_count\x18\x01 \x01(\x05\x12\x1e\n\x16\x61ttempt_response_count\x18\x02 \x01(\x05\x12G\n\x14\x66irst_attempt_status\x18\x03 \x01(\x0b\x32).google.cloud.tasks.v2beta2.AttemptStatus\x12\x46\n\x13last_attempt_status\x18\x04 \x01(\x0b\x32).google.cloud.tasks.v2beta2.AttemptStatus"\xd5\x01\n\rAttemptStatus\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rdispatch_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rresponse_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x0fresponse_status\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusBn\n\x1e\x63om.google.cloud.tasks.v2beta2B\tTaskProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasksb\x06proto3', - dependencies=[ - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_TASK_VIEW = _descriptor.EnumDescriptor( - name="View", - full_name="google.cloud.tasks.v2beta2.Task.View", - filename=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="VIEW_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="BASIC", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FULL", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=619, - serialized_end=668, -) -_sym_db.RegisterEnumDescriptor(_TASK_VIEW) - - -_TASK = _descriptor.Descriptor( - name="Task", - full_name="google.cloud.tasks.v2beta2.Task", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta2.Task.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="app_engine_http_request", - full_name="google.cloud.tasks.v2beta2.Task.app_engine_http_request", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="pull_message", - full_name="google.cloud.tasks.v2beta2.Task.pull_message", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="schedule_time", - full_name="google.cloud.tasks.v2beta2.Task.schedule_time", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.cloud.tasks.v2beta2.Task.create_time", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="status", - full_name="google.cloud.tasks.v2beta2.Task.status", - index=5, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - 
name="view", - full_name="google.cloud.tasks.v2beta2.Task.view", - index=6, - number=8, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_TASK_VIEW,], - serialized_options=b"\352Ae\n\036cloudtasks.googleapis.com/Task\022Cprojects/{project}/locations/{location}/queues/{queue}/tasks/{task}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="payload_type", - full_name="google.cloud.tasks.v2beta2.Task.payload_type", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=238, - serialized_end=790, -) - - -_TASKSTATUS = _descriptor.Descriptor( - name="TaskStatus", - full_name="google.cloud.tasks.v2beta2.TaskStatus", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="attempt_dispatch_count", - full_name="google.cloud.tasks.v2beta2.TaskStatus.attempt_dispatch_count", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="attempt_response_count", - full_name="google.cloud.tasks.v2beta2.TaskStatus.attempt_response_count", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="first_attempt_status", - full_name="google.cloud.tasks.v2beta2.TaskStatus.first_attempt_status", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="last_attempt_status", - full_name="google.cloud.tasks.v2beta2.TaskStatus.last_attempt_status", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=793, - serialized_end=1014, -) - - -_ATTEMPTSTATUS = _descriptor.Descriptor( - name="AttemptStatus", - full_name="google.cloud.tasks.v2beta2.AttemptStatus", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="schedule_time", - full_name="google.cloud.tasks.v2beta2.AttemptStatus.schedule_time", - 
index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dispatch_time", - full_name="google.cloud.tasks.v2beta2.AttemptStatus.dispatch_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_time", - full_name="google.cloud.tasks.v2beta2.AttemptStatus.response_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_status", - full_name="google.cloud.tasks.v2beta2.AttemptStatus.response_status", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1017, - serialized_end=1230, -) - -_TASK.fields_by_name[ - "app_engine_http_request" -].message_type = ( - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2._APPENGINEHTTPREQUEST -) -_TASK.fields_by_name[ - "pull_message" -].message_type = ( - google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2._PULLMESSAGE -) -_TASK.fields_by_name[ - "schedule_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TASK.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TASK.fields_by_name["status"].message_type = _TASKSTATUS -_TASK.fields_by_name["view"].enum_type = _TASK_VIEW -_TASK_VIEW.containing_type = _TASK -_TASK.oneofs_by_name["payload_type"].fields.append( - _TASK.fields_by_name["app_engine_http_request"] -) -_TASK.fields_by_name["app_engine_http_request"].containing_oneof = _TASK.oneofs_by_name[ - "payload_type" -] -_TASK.oneofs_by_name["payload_type"].fields.append(_TASK.fields_by_name["pull_message"]) -_TASK.fields_by_name["pull_message"].containing_oneof = _TASK.oneofs_by_name[ - "payload_type" -] -_TASKSTATUS.fields_by_name["first_attempt_status"].message_type = _ATTEMPTSTATUS -_TASKSTATUS.fields_by_name["last_attempt_status"].message_type = _ATTEMPTSTATUS -_ATTEMPTSTATUS.fields_by_name[ - "schedule_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ATTEMPTSTATUS.fields_by_name[ - "dispatch_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ATTEMPTSTATUS.fields_by_name[ - "response_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ATTEMPTSTATUS.fields_by_name[ - "response_status" -].message_type = google_dot_rpc_dot_status__pb2._STATUS 
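Likewise, the ``Task`` descriptors deleted here correspond to a proto-plus ``Task`` class (with its nested ``View`` enum and the ``payload_type`` oneof) in the new ``types`` package. A small sketch under the same assumption that names are unchanged:

    # Sketch only: constructs a Task with the new types; the resource name below
    # is a placeholder following the format documented for Task.name.
    from google.cloud.tasks_v2beta2.types import target, task

    t = task.Task(
        name="projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task",
        app_engine_http_request=target.AppEngineHttpRequest(relative_url="/process"),
    )
    # Setting app_engine_http_request fills the payload_type oneof, so
    # pull_message is left unset. The nested View enum is still addressable
    # on the message class.
    print(t.name, task.Task.View.FULL)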
-DESCRIPTOR.message_types_by_name["Task"] = _TASK -DESCRIPTOR.message_types_by_name["TaskStatus"] = _TASKSTATUS -DESCRIPTOR.message_types_by_name["AttemptStatus"] = _ATTEMPTSTATUS -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Task = _reflection.GeneratedProtocolMessageType( - "Task", - (_message.Message,), - { - "DESCRIPTOR": _TASK, - "__module__": "google.cloud.tasks_v2beta2.proto.task_pb2", - "__doc__": """A unit of scheduled work. - - Attributes: - name: - Optionally caller-specified in [CreateTask][google.cloud.tasks - .v2beta2.CloudTasks.CreateTask]. The task name. The task - name must have the following format: ``projects/PROJECT_ID/loc - ations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), colons (:), or periods (.). For more - information, see `Identifying projects - `_ - ``LOCATION_ID`` - is the canonical ID for the task’s location. The list of - available locations can be obtained by calling [ListLocatio - ns][google.cloud.location.Locations.ListLocations]. For - more information, see - https://cloud.google.com/about/locations/. - ``QUEUE_ID`` can - contain letters ([A-Za-z]), numbers ([0-9]), or hyphens - (-). The maximum length is 100 characters. - ``TASK_ID`` can - contain only letters ([A-Za-z]), numbers ([0-9]), hyphens - (-), or underscores (_). The maximum length is 500 - characters. - payload_type: - Required. The task’s payload is used by the task’s target to - process the task. A payload is valid only if it is compatible - with the queue’s target. - app_engine_http_request: - App Engine HTTP request that is sent to the task’s target. Can - be set only if [app_engine_http_target][google.cloud.tasks.v2b - eta2.Queue.app_engine_http_target] is set on the queue. An - App Engine task is a task that has [AppEngineHttpRequest][goog - le.cloud.tasks.v2beta2.AppEngineHttpRequest] set. - pull_message: - [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] - to process the task. Can be set only if - [pull_target][google.cloud.tasks.v2beta2.Queue.pull_target] is - set on the queue. A pull task is a task that has - [PullMessage][google.cloud.tasks.v2beta2.PullMessage] set. - schedule_time: - The time when the task is scheduled to be attempted. For App - Engine queues, this is when the task will be attempted or - retried. For pull queues, this is the time when the task is - available to be leased; if a task is currently leased, this is - the time when the current lease expires, that is, the time - that the task was leased plus the [lease_duration][google.clou - d.tasks.v2beta2.LeaseTasksRequest.lease_duration]. - ``schedule_time`` will be truncated to the nearest - microsecond. - create_time: - Output only. The time that the task was created. - ``create_time`` will be truncated to the nearest second. - status: - Output only. The task status. - view: - Output only. The view specifies which subset of the - [Task][google.cloud.tasks.v2beta2.Task] has been returned. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.Task) - }, -) -_sym_db.RegisterMessage(Task) - -TaskStatus = _reflection.GeneratedProtocolMessageType( - "TaskStatus", - (_message.Message,), - { - "DESCRIPTOR": _TASKSTATUS, - "__module__": "google.cloud.tasks_v2beta2.proto.task_pb2", - "__doc__": """Status of the task. - - Attributes: - attempt_dispatch_count: - Output only. The number of attempts dispatched. This count - includes attempts which have been dispatched but haven’t - received a response. 
- attempt_response_count: - Output only. The number of attempts which have received a - response. This field is not calculated for [pull - tasks][google.cloud.tasks.v2beta2.PullMessage]. - first_attempt_status: - Output only. The status of the task’s first attempt. Only [di - spatch_time][google.cloud.tasks.v2beta2.AttemptStatus.dispatch - _time] will be set. The other - [AttemptStatus][google.cloud.tasks.v2beta2.AttemptStatus] - information is not retained by Cloud Tasks. This field is not - calculated for [pull - tasks][google.cloud.tasks.v2beta2.PullMessage]. - last_attempt_status: - Output only. The status of the task’s last attempt. This - field is not calculated for [pull - tasks][google.cloud.tasks.v2beta2.PullMessage]. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.TaskStatus) - }, -) -_sym_db.RegisterMessage(TaskStatus) - -AttemptStatus = _reflection.GeneratedProtocolMessageType( - "AttemptStatus", - (_message.Message,), - { - "DESCRIPTOR": _ATTEMPTSTATUS, - "__module__": "google.cloud.tasks_v2beta2.proto.task_pb2", - "__doc__": """The status of a task attempt. - - Attributes: - schedule_time: - Output only. The time that this attempt was scheduled. - ``schedule_time`` will be truncated to the nearest - microsecond. - dispatch_time: - Output only. The time that this attempt was dispatched. - ``dispatch_time`` will be truncated to the nearest - microsecond. - response_time: - Output only. The time that this attempt response was received. - ``response_time`` will be truncated to the nearest - microsecond. - response_status: - Output only. The response from the target for this attempt. - If the task has not been attempted or the task is currently - running then the response status is unset. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AttemptStatus) - }, -) -_sym_db.RegisterMessage(AttemptStatus) - - -DESCRIPTOR._options = None -_TASK._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/tasks_v2beta2/proto/task_pb2_grpc.py b/google/cloud/tasks_v2beta2/proto/task_pb2_grpc.py deleted file mode 100644 index 8a939394..00000000 --- a/google/cloud/tasks_v2beta2/proto/task_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/tasks_v2beta2/py.typed b/google/cloud/tasks_v2beta2/py.typed new file mode 100644 index 00000000..41f0b1b8 --- /dev/null +++ b/google/cloud/tasks_v2beta2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. diff --git a/google/cloud/tasks_v2beta2/services/__init__.py b/google/cloud/tasks_v2beta2/services/__init__.py new file mode 100644 index 00000000..42ffdf2b --- /dev/null +++ b/google/cloud/tasks_v2beta2/services/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/google/cloud/__init__.py b/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py similarity index 71% rename from google/cloud/__init__.py rename to google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py index 9a1b64a6..498f5941 100644 --- a/google/cloud/__init__.py +++ b/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py @@ -1,24 +1,24 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil +from .client import CloudTasksClient +from .async_client import CloudTasksAsyncClient - __path__ = pkgutil.extend_path(__path__, __name__) +__all__ = ( + "CloudTasksClient", + "CloudTasksAsyncClient", +) diff --git a/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py b/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py new file mode 100644 index 00000000..6e198977 --- /dev/null +++ b/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py @@ -0,0 +1,2158 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
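The rename above replaces the old namespace-package shim with a real ``services.cloud_tasks`` package that exports both client classes. For reference, the new import surface as a sketch; credentials are resolved from the environment when a client is built:

    # Sketch only: both clients come from the new services subpackage, as
    # declared in the __all__ above. Constructing one uses Application Default
    # Credentials unless explicit credentials are passed.
    from google.cloud.tasks_v2beta2.services.cloud_tasks import (
        CloudTasksAsyncClient,
        CloudTasksClient,
    )

    client = CloudTasksClient()
    async_client = CloudTasksAsyncClient()  # defaults to the "grpc_asyncio" transport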
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2beta2.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import target +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport +from .client import CloudTasksClient + + +class CloudTasksAsyncClient: + """Cloud Tasks allows developers to manage the execution of + background work in their applications. + """ + + _client: CloudTasksClient + + DEFAULT_ENDPOINT = CloudTasksClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudTasksClient.DEFAULT_MTLS_ENDPOINT + + task_path = staticmethod(CloudTasksClient.task_path) + + queue_path = staticmethod(CloudTasksClient.queue_path) + + from_service_account_file = CloudTasksClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(CloudTasksClient).get_transport_class, type(CloudTasksClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, CloudTasksTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the cloud tasks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. 
+ (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = CloudTasksClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_queues( + self, + request: cloudtasks.ListQueuesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesAsyncPager: + r"""Lists queues. + Queues are returned in lexicographical order. + + Args: + request (:class:`~.cloudtasks.ListQueuesRequest`): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListQueuesAsyncPager: + Response message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ListQueuesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_queues, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListQueuesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_queue( + self, + request: cloudtasks.GetQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. 
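The constructor options and the ``list_queues`` wrapper above translate into the following usage pattern; a sketch with placeholder project, location and queue IDs, assuming Application Default Credentials are available:

    # Sketch only: lists queues through the async pager, then fetches one queue.
    import asyncio

    from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient


    async def main() -> None:
        client = CloudTasksAsyncClient()  # credentials picked up from the environment
        parent = "projects/my-project/locations/us-central1"

        # list_queues returns a ListQueuesAsyncPager; iterating it resolves
        # additional pages automatically.
        pager = await client.list_queues(parent=parent)
        async for q in pager:
            print(q.name)

        q = await client.get_queue(name=f"{parent}/queues/my-queue")
        print(q)


    asyncio.run(main())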
+ + Args: + request (:class:`~.cloudtasks.GetQueueRequest`): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. + name (:class:`str`): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.GetQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_queue( + self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.CreateQueueRequest`): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. + parent (:class:`str`): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. 
+ This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (:class:`~.gct_queue.Queue`): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta2.Queue.name] + cannot be the same as an existing queue. + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, queue]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.CreateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_queue( + self, + request: cloudtasks.UpdateQueueRequest = None, + *, + queue: gct_queue.Queue = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.UpdateQueueRequest`): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue]. + queue (:class:`~.gct_queue.Queue`): + Required. The queue to create or update. + + The queue's + [name][google.cloud.tasks.v2beta2.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2beta2.Queue.name] cannot be + changed. 
+ This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([queue, update_mask]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.UpdateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("queue.name", request.queue.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_queue( + self, + request: cloudtasks.DeleteQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.DeleteQueueRequest`): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
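``create_queue``, ``update_queue`` and ``delete_queue`` above share the same flattened-argument style. A sketch with placeholder resource names; the ``update_mask`` path is only for illustration:

    # Sketch only: creates a queue, updates it with an explicit field mask, then
    # deletes it. Resource names are placeholders.
    import asyncio

    from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient
    from google.cloud.tasks_v2beta2.types import queue as gct_queue
    from google.protobuf import field_mask_pb2


    async def main() -> None:
        client = CloudTasksAsyncClient()
        parent = "projects/my-project/locations/us-central1"
        name = f"{parent}/queues/my-queue"

        created = await client.create_queue(parent=parent, queue=gct_queue.Queue(name=name))
        print(created.name)

        # Only the fields listed in update_mask are modified; an empty mask
        # would update every field (see the docstring above).
        await client.update_queue(
            queue=gct_queue.Queue(name=name),
            update_mask=field_mask_pb2.FieldMask(paths=["retry_config"]),
        )

        await client.delete_queue(name=name)  # no return value


    asyncio.run(main())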
+ """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.DeleteQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def purge_queue( + self, + request: cloudtasks.PurgeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Args: + request (:class:`~.cloudtasks.PurgeQueueRequest`): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.PurgeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.purge_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def pause_queue( + self, + request: cloudtasks.PauseQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + Args: + request (:class:`~.cloudtasks.PauseQueueRequest`): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.PauseQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def resume_queue( + self, + request: cloudtasks.ResumeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. 
+ + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta2.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Args: + request (:class:`~.cloudtasks.ResumeQueueRequest`): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ResumeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. 
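# Usage sketch (illustrative only): the pause/purge/resume lifecycle implemented
# above, run against a hypothetical queue; assumes Application Default
# Credentials.
import asyncio

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient


async def main() -> None:
    client = CloudTasksAsyncClient()
    queue_name = "projects/my-project/locations/us-central1/queues/my-queue"

    paused = await client.pause_queue(name=queue_name)    # state becomes PAUSED
    await client.purge_queue(name=queue_name)              # irreversibly drops all tasks
    resumed = await client.resume_queue(name=queue_name)   # state returns to RUNNING
    print(paused.state, resumed.state)


asyncio.run(main())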
+ This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([resource]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + elif not request: + request = iam_policy.GetIamPolicyRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + + """ + # Create or coerce a protobuf request object. 
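# Usage sketch (illustrative only): reading a queue's IAM policy with the
# get_iam_policy coroutine above. The queue name is a hypothetical placeholder;
# the caller needs the cloudtasks.queues.getIamPolicy permission.
import asyncio

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient


async def main() -> None:
    client = CloudTasksAsyncClient()
    policy = await client.get_iam_policy(
        resource="projects/my-project/locations/us-central1/queues/my-queue"
    )
    for binding in policy.bindings:
        print(binding.role, list(binding.members))


asyncio.run(main())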
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([resource]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + elif not request: + request = iam_policy.SetIamPolicyRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`Sequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
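# Usage sketch (illustrative only): a read-modify-write of a queue's IAM policy.
# Because set_iam_policy only flattens the ``resource`` field, the policy is
# passed through a request dict. The queue name, role, and member below are
# hypothetical.
import asyncio

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient


async def main() -> None:
    client = CloudTasksAsyncClient()
    queue_name = "projects/my-project/locations/us-central1/queues/my-queue"

    policy = await client.get_iam_policy(resource=queue_name)
    policy.bindings.add(
        role="roles/cloudtasks.queueAdmin",
        members=["user:admin@example.com"],
    )
    updated = await client.set_iam_policy(
        request={"resource": queue_name, "policy": policy}
    )
    print(updated.etag)


asyncio.run(main())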
+ if request is not None and any([resource, permissions]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + elif not request: + request = iam_policy.TestIamPermissionsRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_tasks( + self, + request: cloudtasks.ListTasksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksAsyncPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Args: + request (:class:`~.cloudtasks.ListTasksRequest`): + The request object. Request message for listing tasks + using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTasksAsyncPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = cloudtasks.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tasks, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTasksAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_task( + self, + request: cloudtasks.GetTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + Args: + request (:class:`~.cloudtasks.GetTaskRequest`): + The request object. Request message for getting a task + using + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
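# Usage sketch (illustrative only): iterating the ListTasksAsyncPager returned
# by list_tasks above; additional pages are fetched transparently during
# iteration. The queue name is a hypothetical placeholder.
import asyncio

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient


async def main() -> None:
    client = CloudTasksAsyncClient()
    pager = await client.list_tasks(
        parent="projects/my-project/locations/us-central1/queues/my-queue"
    )
    async for task in pager:
        # Only the BASIC view is returned by default.
        print(task.name, task.schedule_time)


asyncio.run(main())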
+ return response + + async def create_task( + self, + request: cloudtasks.CreateTaskRequest = None, + *, + parent: str = None, + task: gct_task.Task = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the + maximum task size is 100KB. + - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the + maximum task size is 1MB. + + Args: + request (:class:`~.cloudtasks.CreateTaskRequest`): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`~.gct_task.Task`): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta2.Task.name]. If a name + is not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2beta2.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or completed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or completed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + completed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
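# Usage sketch (illustrative only): fetching a single task with get_task above;
# the task name is a hypothetical placeholder.
import asyncio

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient


async def main() -> None:
    client = CloudTasksAsyncClient()
    task = await client.get_task(
        name="projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task"
    )
    print(task.name, task.view)


asyncio.run(main())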
+ if request is not None and any([parent, task]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_task, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_task( + self, + request: cloudtasks.DeleteTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has completed + successfully or permanently failed. + + Args: + request (:class:`~.cloudtasks.DeleteTaskRequest`): + The request object. Request message for deleting a task + using + [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
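# Usage sketch (illustrative only): creating a task with create_task above. The
# example adds a pull task, so it assumes the hypothetical queue was created as
# a pull queue; the payload and tag are placeholders.
import asyncio

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient
from google.cloud.tasks_v2beta2.types import target
from google.cloud.tasks_v2beta2.types import task as task_types


async def main() -> None:
    client = CloudTasksAsyncClient()
    created = await client.create_task(
        parent="projects/my-project/locations/us-central1/queues/my-pull-queue",
        task=task_types.Task(
            pull_message=target.PullMessage(payload=b"example-payload", tag="reports"),
        ),
    )
    # The service assigns a random task ID because no name was specified.
    print(created.name)


asyncio.run(main())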
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def lease_tasks( + self, + request: cloudtasks.LeaseTasksRequest = None, + *, + parent: str = None, + lease_duration: duration.Duration = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.LeaseTasksResponse: + r"""Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + after they have performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is + intended to store data that the worker needs to perform the work + associated with the task. To return the payloads in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per queue. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is + returned when this limit is exceeded. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also + returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + + Args: + request (:class:`~.cloudtasks.LeaseTasksRequest`): + The request object. Request message for leasing tasks + using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lease_duration (:class:`~.duration.Duration`): + Required. The duration of the lease. + + Each task returned in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] + will have its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + set to the current time plus the ``lease_duration``. The + task is leased until its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]; + thus, the task will not be returned to another + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + call before its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + After the worker has successfully finished the work + associated with the task, the worker must call via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + before the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + Otherwise the task will be returned to a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + call so that another worker can retry it. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + This corresponds to the ``lease_duration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.cloudtasks.LeaseTasksResponse: + Response message for leasing tasks using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, lease_duration]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.LeaseTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if lease_duration is not None: + request.lease_duration = lease_duration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.lease_tasks, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def acknowledge_task( + self, + request: cloudtasks.AcknowledgeTaskRequest = None, + *, + name: str = None, + schedule_time: timestamp.Timestamp = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Acknowledges a pull task. + + The worker, that is, the entity that + [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this + task must call this method to indicate that the work associated + with the task has finished. + + The worker must acknowledge a task within the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + or the lease will expire and the task will become available to + be leased again. After the task is acknowledged, it will not be + returned by a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Args: + request (:class:`~.cloudtasks.AcknowledgeTaskRequest`): + The request object. Request message for acknowledging a + task using + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`~.timestamp.Timestamp`): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
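# Usage sketch (illustrative only): leasing pull tasks with lease_tasks above.
# The queue name is a hypothetical placeholder; remember that at most 10 qps of
# LeaseTasks calls are allowed per queue.
import asyncio

from google.protobuf import duration_pb2

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient


async def main() -> None:
    client = CloudTasksAsyncClient()
    response = await client.lease_tasks(
        parent="projects/my-project/locations/us-central1/queues/my-pull-queue",
        lease_duration=duration_pb2.Duration(seconds=600),  # 10-minute lease
    )
    for leased in response.tasks:
        print(leased.name, leased.schedule_time)


asyncio.run(main())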
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name, schedule_time]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.AcknowledgeTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.acknowledge_task, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def renew_lease( + self, + request: cloudtasks.RenewLeaseRequest = None, + *, + name: str = None, + schedule_time: timestamp.Timestamp = None, + lease_duration: duration.Duration = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be returned + in the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + Args: + request (:class:`~.cloudtasks.RenewLeaseRequest`): + The request object. Request message for renewing a lease + using + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`~.timestamp.Timestamp`): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lease_duration (:class:`~.duration.Duration`): + Required. The desired new lease duration, starting from + now. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + This corresponds to the ``lease_duration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
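# Usage sketch (illustrative only): the lease/work/acknowledge cycle described
# above. The queue name is hypothetical and the "work" is elided; each task is
# acknowledged with the schedule_time returned by the lease so the service can
# verify that this worker still holds the lease.
import asyncio

from google.protobuf import duration_pb2

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient


async def main() -> None:
    client = CloudTasksAsyncClient()
    response = await client.lease_tasks(
        parent="projects/my-project/locations/us-central1/queues/my-pull-queue",
        lease_duration=duration_pb2.Duration(seconds=300),
    )
    for leased in response.tasks:
        # ... perform the work associated with the task here ...
        await client.acknowledge_task(
            name=leased.name, schedule_time=leased.schedule_time
        )


asyncio.run(main())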
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name, schedule_time, lease_duration]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.RenewLeaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + if lease_duration is not None: + request.lease_duration = lease_duration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.renew_lease, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def cancel_lease( + self, + request: cloudtasks.CancelLeaseRequest = None, + *, + name: str = None, + schedule_time: timestamp.Timestamp = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + to now. This will make the task available to be leased to the + next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + Args: + request (:class:`~.cloudtasks.CancelLeaseRequest`): + The request object. Request message for canceling a + lease using + [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`~.timestamp.Timestamp`): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. 
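# Usage sketch (illustrative only): extending a lease mid-work with renew_lease
# above (cancel_lease, defined next, is the inverse and makes the task leasable
# again immediately). The queue name and durations are hypothetical.
import asyncio

from google.protobuf import duration_pb2

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient


async def main() -> None:
    client = CloudTasksAsyncClient()
    response = await client.lease_tasks(
        parent="projects/my-project/locations/us-central1/queues/my-pull-queue",
        lease_duration=duration_pb2.Duration(seconds=60),
    )
    for leased in response.tasks:
        # The work is taking longer than expected; ask for ten more minutes.
        renewed = await client.renew_lease(
            name=leased.name,
            schedule_time=leased.schedule_time,
            lease_duration=duration_pb2.Duration(seconds=600),
        )
        print(renewed.schedule_time)


asyncio.run(main())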
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name, schedule_time]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.CancelLeaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_lease, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def run_task( + self, + request: cloudtasks.RunTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the + [status][google.cloud.tasks.v2beta2.Task.status] after the task + is dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot + be called on a [pull + task][google.cloud.tasks.v2beta2.PullMessage]. + + Args: + request (:class:`~.cloudtasks.RunTaskRequest`): + The request object. Request message for forcing a task + to run now using + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_task, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-tasks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("CloudTasksAsyncClient",) diff --git a/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py b/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py new file mode 100644 index 00000000..2cace164 --- /dev/null +++ b/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py @@ -0,0 +1,2274 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
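# Usage sketch (illustrative only): forcing a task to run with the async
# client's run_task coroutine shown earlier; the task name is a hypothetical
# placeholder and must not refer to a pull task.
import asyncio

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient


async def main() -> None:
    client = CloudTasksAsyncClient()
    task = await client.run_task(
        name="projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task",
        timeout=30.0,  # per-call override of the default timeout
    )
    # The returned task carries the status recorded at dispatch time.
    print(task.status)


asyncio.run(main())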
+#
+
+from collections import OrderedDict
+import os
+import re
+from typing import Callable, Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.tasks_v2beta2.services.cloud_tasks import pagers
+from google.cloud.tasks_v2beta2.types import cloudtasks
+from google.cloud.tasks_v2beta2.types import queue
+from google.cloud.tasks_v2beta2.types import queue as gct_queue
+from google.cloud.tasks_v2beta2.types import target
+from google.cloud.tasks_v2beta2.types import task
+from google.cloud.tasks_v2beta2.types import task as gct_task
+from google.iam.v1 import iam_policy_pb2 as iam_policy  # type: ignore
+from google.iam.v1 import policy_pb2 as policy  # type: ignore
+from google.protobuf import duration_pb2 as duration  # type: ignore
+from google.protobuf import field_mask_pb2 as field_mask  # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
+
+from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import CloudTasksGrpcTransport
+from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport
+
+
+class CloudTasksClientMeta(type):
+    """Metaclass for the CloudTasks client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[CloudTasksTransport]]
+    _transport_registry["grpc"] = CloudTasksGrpcTransport
+    _transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport
+
+    def get_transport_class(cls, label: str = None,) -> Type[CloudTasksTransport]:
+        """Return an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class CloudTasksClient(metaclass=CloudTasksClientMeta):
+    """Cloud Tasks allows developers to manage the execution of
+    background work in their applications.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "cloudtasks.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            {@api.name}: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @staticmethod
+    def queue_path(project: str, location: str, queue: str,) -> str:
+        """Return a fully-qualified queue string."""
+        return "projects/{project}/locations/{location}/queues/{queue}".format(
+            project=project, location=location, queue=queue,
+        )
+
+    @staticmethod
+    def parse_queue_path(path: str) -> Dict[str, str]:
+        """Parse a queue path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/queues/(?P<queue>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def task_path(project: str, location: str, queue: str, task: str,) -> str:
+        """Return a fully-qualified task string."""
+        return "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(
+            project=project, location=location, queue=queue, task=task,
+        )
+
+    @staticmethod
+    def parse_task_path(path: str) -> Dict[str, str]:
+        """Parse a task path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/queues/(?P<queue>.+?)/tasks/(?P<task>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: credentials.Credentials = None,
+        transport: Union[str, CloudTasksTransport] = None,
+        client_options: ClientOptions = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the cloud tasks client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.CloudTasksTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint, this is the default value for
+                the environment variable) and "auto" (auto switch to the default
+                mTLS endpoint if client SSL credentials is present). However,
+                the ``api_endpoint`` property takes precedence if provided.
+ (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + if client_options.api_endpoint is None: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + if use_mtls_env == "never": + client_options.api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + has_client_cert_source = ( + client_options.client_cert_source is not None + or mtls.has_default_client_cert_source() + ) + client_options.api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT + if has_client_cert_source + else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudTasksTransport): + # transport is a CloudTasksTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=client_options.api_endpoint, + scopes=client_options.scopes, + api_mtls_endpoint=client_options.api_endpoint, + client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_queues( + self, + request: cloudtasks.ListQueuesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesPager: + r"""Lists queues. + Queues are returned in lexicographical order. + + Args: + request (:class:`~.cloudtasks.ListQueuesRequest`): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.pagers.ListQueuesPager: + Response message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListQueuesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListQueuesRequest): + request = cloudtasks.ListQueuesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_queues] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQueuesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_queue( + self, + request: cloudtasks.GetQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + Args: + request (:class:`~.cloudtasks.GetQueueRequest`): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. + name (:class:`str`): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetQueueRequest. 
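+        # [editor's note] Illustrative usage sketch, not part of the original
+        # patch: listing queues and fetching one. "my-project" and
+        # "us-central1" are placeholder values.
+        #
+        #     from google.cloud import tasks_v2beta2
+        #
+        #     client = tasks_v2beta2.CloudTasksClient()
+        #     parent = "projects/my-project/locations/us-central1"
+        #     for q in client.list_queues(parent=parent):
+        #         print(client.get_queue(name=q.name).state)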
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetQueueRequest): + request = cloudtasks.GetQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_queue( + self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.CreateQueueRequest`): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. + parent (:class:`str`): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (:class:`~.gct_queue.Queue`): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta2.Queue.name] + cannot be the same as an existing queue. + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateQueueRequest. 
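+        # [editor's note] Illustrative sketch, not part of the original patch,
+        # assuming the tasks_v2beta2 package re-exports Queue as other microgen
+        # clients do; project/location/queue names are placeholders.
+        #
+        #     from google.cloud import tasks_v2beta2
+        #
+        #     client = tasks_v2beta2.CloudTasksClient()
+        #     parent = "projects/my-project/locations/us-central1"
+        #     queue = tasks_v2beta2.Queue(name=parent + "/queues/my-queue")
+        #     created = client.create_queue(parent=parent, queue=queue)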
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateQueueRequest): + request = cloudtasks.CreateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_queue( + self, + request: cloudtasks.UpdateQueueRequest = None, + *, + queue: gct_queue.Queue = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.UpdateQueueRequest`): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue]. + queue (:class:`~.gct_queue.Queue`): + Required. The queue to create or update. + + The queue's + [name][google.cloud.tasks.v2beta2.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2beta2.Queue.name] cannot be + changed. + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
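+        # [editor's note] Illustrative sketch, not part of the original patch:
+        # updating a single field with an explicit FieldMask. The resource name
+        # is a placeholder.
+        #
+        #     from google.protobuf import field_mask_pb2
+        #
+        #     queue = client.get_queue(name="projects/p/locations/l/queues/q")
+        #     queue.rate_limits.max_tasks_dispatched_per_second = 10
+        #     mask = field_mask_pb2.FieldMask(
+        #         paths=["rate_limits.max_tasks_dispatched_per_second"]
+        #     )
+        #     client.update_queue(queue=queue, update_mask=mask)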
+ has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.UpdateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.UpdateQueueRequest): + request = cloudtasks.UpdateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("queue.name", request.queue.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_queue( + self, + request: cloudtasks.DeleteQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.DeleteQueueRequest`): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteQueueRequest): + request = cloudtasks.DeleteQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
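+        # [editor's note] Illustrative sketch, not part of the original patch;
+        # per the docstring above, the deleted queue's name cannot be reused
+        # for 7 days. The name is a placeholder.
+        #
+        #     client.delete_queue(name="projects/p/locations/l/queues/q")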
+ + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def purge_queue( + self, + request: cloudtasks.PurgeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Args: + request (:class:`~.cloudtasks.PurgeQueueRequest`): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PurgeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PurgeQueueRequest): + request = cloudtasks.PurgeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
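+        # [editor's note] Illustrative sketch, not part of the original patch:
+        # purging keeps the queue but permanently deletes its tasks (the purge
+        # can take up to a minute to take effect). The name is a placeholder.
+        #
+        #     client.purge_queue(name="projects/p/locations/l/queues/q")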
+ return response + + def pause_queue( + self, + request: cloudtasks.PauseQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + Args: + request (:class:`~.cloudtasks.PauseQueueRequest`): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PauseQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PauseQueueRequest): + request = cloudtasks.PauseQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def resume_queue( + self, + request: cloudtasks.ResumeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. 
The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta2.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Args: + request (:class:`~.cloudtasks.ResumeQueueRequest`): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ResumeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ResumeQueueRequest): + request = cloudtasks.ResumeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. 
See the + operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + elif not request: + request = iam_policy.GetIamPolicyRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. 
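+        # [editor's note] Illustrative sketch, not part of the original patch:
+        # reading the queue-level IAM policy. The resource name is a placeholder.
+        #
+        #     policy = client.get_iam_policy(
+        #         resource="projects/p/locations/l/queues/q"
+        #     )
+        #     for binding in policy.bindings:
+        #         print(binding.role, list(binding.members))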
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
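+        # [editor's note] Illustrative sketch, not part of the original patch.
+        # Only ``resource`` is flattened here, so the policy is passed via a
+        # request dict, which the code below expands into a SetIamPolicyRequest.
+        # ``queue_name``, the role, and the member are placeholders.
+        #
+        #     policy = client.get_iam_policy(resource=queue_name)
+        #     policy.bindings.add(
+        #         role="roles/cloudtasks.queueAdmin",
+        #         members=["user:admin@example.com"],
+        #     )
+        #     client.set_iam_policy(
+        #         request={"resource": queue_name, "policy": policy}
+        #     )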
+ has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + elif not request: + request = iam_policy.SetIamPolicyRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`Sequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + elif not request: + request = iam_policy.TestIamPermissionsRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_tasks( + self, + request: cloudtasks.ListTasksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Args: + request (:class:`~.cloudtasks.ListTasksRequest`): + The request object. Request message for listing tasks + using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTasksPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListTasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListTasksRequest): + request = cloudtasks.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
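+        # [editor's note] Illustrative sketch, not part of the original patch:
+        # the pager returned below transparently fetches additional pages. The
+        # queue name is a placeholder.
+        #
+        #     parent = "projects/p/locations/l/queues/q"
+        #     for task in client.list_tasks(parent=parent):
+        #         print(task.name, task.schedule_time)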
+ + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTasksPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_task( + self, + request: cloudtasks.GetTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + Args: + request (:class:`~.cloudtasks.GetTaskRequest`): + The request object. Request message for getting a task + using + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetTaskRequest): + request = cloudtasks.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_task( + self, + request: cloudtasks.CreateTaskRequest = None, + *, + parent: str = None, + task: gct_task.Task = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. 
+ + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the + maximum task size is 100KB. + - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the + maximum task size is 1MB. + + Args: + request (:class:`~.cloudtasks.CreateTaskRequest`): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`~.gct_task.Task`): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta2.Task.name]. If a name + is not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2beta2.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or completed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or completed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + completed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateTaskRequest. 
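+        # [editor's note] Illustrative sketch, not part of the original patch,
+        # assuming a pull queue; a random/hashed task id avoids the
+        # sequential-id latency issue described above. Names are placeholders.
+        #
+        #     import uuid
+        #
+        #     parent = "projects/p/locations/l/queues/q"
+        #     task = tasks_v2beta2.Task(
+        #         name=parent + "/tasks/" + uuid.uuid4().hex,
+        #         pull_message={"payload": b"hello"},
+        #     )
+        #     created = client.create_task(parent=parent, task=task)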
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateTaskRequest): + request = cloudtasks.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_task( + self, + request: cloudtasks.DeleteTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has completed + successfully or permanently failed. + + Args: + request (:class:`~.cloudtasks.DeleteTaskRequest`): + The request object. Request message for deleting a task + using + [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteTaskRequest): + request = cloudtasks.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
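+        # [editor's note] Illustrative sketch, not part of the original patch:
+        # fetching and then deleting a task. The task name is a placeholder.
+        #
+        #     name = "projects/p/locations/l/queues/q/tasks/t"
+        #     task = client.get_task(name=name)
+        #     client.delete_task(name=task.name)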
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def lease_tasks( + self, + request: cloudtasks.LeaseTasksRequest = None, + *, + parent: str = None, + lease_duration: duration.Duration = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.LeaseTasksResponse: + r"""Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + after they have performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is + intended to store data that the worker needs to perform the work + associated with the task. To return the payloads in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per queue. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is + returned when this limit is exceeded. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also + returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + + Args: + request (:class:`~.cloudtasks.LeaseTasksRequest`): + The request object. Request message for leasing tasks + using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lease_duration (:class:`~.duration.Duration`): + Required. The duration of the lease. + + Each task returned in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] + will have its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + set to the current time plus the ``lease_duration``. The + task is leased until its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]; + thus, the task will not be returned to another + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + call before its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + After the worker has successfully finished the work + associated with the task, the worker must call via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + before the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + Otherwise the task will be returned to a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + call so that another worker can retry it. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + This corresponds to the ``lease_duration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.cloudtasks.LeaseTasksResponse: + Response message for leasing tasks using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, lease_duration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.LeaseTasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.LeaseTasksRequest): + request = cloudtasks.LeaseTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if lease_duration is not None: + request.lease_duration = lease_duration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.lease_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def acknowledge_task( + self, + request: cloudtasks.AcknowledgeTaskRequest = None, + *, + name: str = None, + schedule_time: timestamp.Timestamp = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Acknowledges a pull task. + + The worker, that is, the entity that + [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this + task must call this method to indicate that the work associated + with the task has finished. + + The worker must acknowledge a task within the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + or the lease will expire and the task will become available to + be leased again. After the task is acknowledged, it will not be + returned by a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Args: + request (:class:`~.cloudtasks.AcknowledgeTaskRequest`): + The request object. Request message for acknowledging a + task using + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`~.timestamp.Timestamp`): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. 
+ This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.AcknowledgeTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.AcknowledgeTaskRequest): + request = cloudtasks.AcknowledgeTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.acknowledge_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def renew_lease( + self, + request: cloudtasks.RenewLeaseRequest = None, + *, + name: str = None, + schedule_time: timestamp.Timestamp = None, + lease_duration: duration.Duration = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be returned + in the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + Args: + request (:class:`~.cloudtasks.RenewLeaseRequest`): + The request object. Request message for renewing a lease + using + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`~.timestamp.Timestamp`): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lease_duration (:class:`~.duration.Duration`): + Required. The desired new lease duration, starting from + now. 
+ + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + This corresponds to the ``lease_duration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, schedule_time, lease_duration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.RenewLeaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.RenewLeaseRequest): + request = cloudtasks.RenewLeaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + if lease_duration is not None: + request.lease_duration = lease_duration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.renew_lease] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def cancel_lease( + self, + request: cloudtasks.CancelLeaseRequest = None, + *, + name: str = None, + schedule_time: timestamp.Timestamp = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + to now. This will make the task available to be leased to the + next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + Args: + request (:class:`~.cloudtasks.CancelLeaseRequest`): + The request object. Request message for canceling a + lease using + [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`~.timestamp.Timestamp`): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. 
This restriction is to ensure that your worker + currently holds the lease. + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CancelLeaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CancelLeaseRequest): + request = cloudtasks.CancelLeaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_lease] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def run_task( + self, + request: cloudtasks.RunTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the + [status][google.cloud.tasks.v2beta2.Task.status] after the task + is dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. 
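(Editorial aside: a minimal sketch of forcing a dispatch through the flattened ``name`` argument documented below; the task name is a placeholder, and the snippet assumes an existing non-pull task, per the caveat that follows.)

    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()

    # Placeholder task name; substitute an existing, non-pull task.
    task_name = (
        "projects/my-project/locations/us-central1"
        "/queues/my-queue/tasks/my-task"
    )

    # Ask for an immediate dispatch; the returned Task carries the status
    # recorded after dispatch but before the target has responded.
    dispatched = client.run_task(name=task_name)
    print(dispatched.name)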
+ + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot + be called on a [pull + task][google.cloud.tasks.v2beta2.PullMessage]. + + Args: + request (:class:`~.cloudtasks.RunTaskRequest`): + The request object. Request message for forcing a task + to run now using + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.RunTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.RunTaskRequest): + request = cloudtasks.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-tasks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("CloudTasksClient",) diff --git a/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py b/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py new file mode 100644 index 00000000..6a5b4ad7 --- /dev/null +++ b/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py @@ -0,0 +1,278 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
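(Editorial aside: the pager classes defined below are what the synchronous list methods hand back. A minimal sketch of how transparent paging is expected to look from the caller's side; the location path is a placeholder, and the flattened ``parent`` argument on ``list_queues`` is assumed from the client surface elsewhere in this patch.)

    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1"  # placeholder

    # The ListQueuesPager issues follow-up ListQueues requests lazily as
    # iteration crosses page boundaries, so the loop sees one flat stream.
    for q in client.list_queues(parent=parent):
        print(q.name)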
+# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import task + + +class ListQueuesPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`~.cloudtasks.ListQueuesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`~.cloudtasks.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudtasks.ListQueuesResponse], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloudtasks.ListQueuesRequest`): + The initial request object. + response (:class:`~.cloudtasks.ListQueuesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[queue.Queue]: + for page in self.pages: + yield from page.queues + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListQueuesAsyncPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`~.cloudtasks.ListQueuesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`~.cloudtasks.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudtasks.ListQueuesResponse]], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloudtasks.ListQueuesRequest`): + The initial request object. + response (:class:`~.cloudtasks.ListQueuesResponse`): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[queue.Queue]: + async def async_generator(): + async for page in self.pages: + for response in page.queues: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTasksPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`~.cloudtasks.ListTasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`~.cloudtasks.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudtasks.ListTasksResponse], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloudtasks.ListTasksRequest`): + The initial request object. + response (:class:`~.cloudtasks.ListTasksResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[task.Task]: + for page in self.pages: + yield from page.tasks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTasksAsyncPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`~.cloudtasks.ListTasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`~.cloudtasks.ListTasksResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudtasks.ListTasksResponse]], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloudtasks.ListTasksRequest`): + The initial request object. + response (:class:`~.cloudtasks.ListTasksResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[task.Task]: + async def async_generator(): + async for page in self.pages: + for response in page.tasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py new file mode 100644 index 00000000..72f33c1b --- /dev/null +++ b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudTasksTransport +from .grpc import CloudTasksGrpcTransport +from .grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +# Compile a registry of transports. 
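(Editorial aside: the registry assembled just below maps transport names to transport classes. A minimal sketch of selecting a transport explicitly; the ``transport`` keyword on ``CloudTasksClient`` accepting either a registered name or an instance is an assumption based on the microgenerated surface, and credentials are resolved from the environment.)

    from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksClient
    from google.cloud.tasks_v2beta2.services.cloud_tasks.transports import (
        CloudTasksGrpcTransport,
    )

    # Select the transport by its registered name...
    client = CloudTasksClient(transport="grpc")

    # ...or construct one directly and hand the instance to the client.
    transport = CloudTasksGrpcTransport(host="cloudtasks.googleapis.com")
    client = CloudTasksClient(transport=transport)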
+_transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] +_transport_registry["grpc"] = CloudTasksGrpcTransport +_transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport + + +__all__ = ( + "CloudTasksTransport", + "CloudTasksGrpcTransport", + "CloudTasksGrpcAsyncIOTransport", +) diff --git a/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py new file mode 100644 index 00000000..17ac8ced --- /dev/null +++ b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py @@ -0,0 +1,448 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import abc +import typing +import pkg_resources + +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore + +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-tasks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class CloudTasksTransport(abc.ABC): + """Abstract transport class for CloudTasks.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages(client_info) + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_queues: gapic_v1.method.wrap_method( + self.list_queues, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.get_queue: gapic_v1.method.wrap_method( + self.get_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.create_queue: gapic_v1.method.wrap_method( + self.create_queue, default_timeout=10.0, client_info=client_info, + ), + self.update_queue: gapic_v1.method.wrap_method( + self.update_queue, default_timeout=10.0, client_info=client_info, + ), + self.delete_queue: gapic_v1.method.wrap_method( + self.delete_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.purge_queue: gapic_v1.method.wrap_method( + self.purge_queue, default_timeout=10.0, client_info=client_info, + ), + self.pause_queue: gapic_v1.method.wrap_method( + self.pause_queue, default_timeout=10.0, client_info=client_info, + ), + self.resume_queue: gapic_v1.method.wrap_method( + self.resume_queue, default_timeout=10.0, client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, default_timeout=10.0, client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + 
client_info=client_info, + ), + self.list_tasks: gapic_v1.method.wrap_method( + self.list_tasks, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.get_task: gapic_v1.method.wrap_method( + self.get_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.create_task: gapic_v1.method.wrap_method( + self.create_task, default_timeout=10.0, client_info=client_info, + ), + self.delete_task: gapic_v1.method.wrap_method( + self.delete_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.lease_tasks: gapic_v1.method.wrap_method( + self.lease_tasks, default_timeout=10.0, client_info=client_info, + ), + self.acknowledge_task: gapic_v1.method.wrap_method( + self.acknowledge_task, default_timeout=10.0, client_info=client_info, + ), + self.renew_lease: gapic_v1.method.wrap_method( + self.renew_lease, default_timeout=10.0, client_info=client_info, + ), + self.cancel_lease: gapic_v1.method.wrap_method( + self.cancel_lease, default_timeout=10.0, client_info=client_info, + ), + self.run_task: gapic_v1.method.wrap_method( + self.run_task, default_timeout=10.0, client_info=client_info, + ), + } + + @property + def list_queues( + self, + ) -> typing.Callable[ + [cloudtasks.ListQueuesRequest], + typing.Union[ + cloudtasks.ListQueuesResponse, + typing.Awaitable[cloudtasks.ListQueuesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_queue( + self, + ) -> typing.Callable[ + [cloudtasks.GetQueueRequest], + typing.Union[queue.Queue, typing.Awaitable[queue.Queue]], + ]: + raise NotImplementedError() + + @property + def create_queue( + self, + ) -> typing.Callable[ + [cloudtasks.CreateQueueRequest], + typing.Union[gct_queue.Queue, typing.Awaitable[gct_queue.Queue]], + ]: + raise NotImplementedError() + + @property + def update_queue( + self, + ) -> typing.Callable[ + [cloudtasks.UpdateQueueRequest], + typing.Union[gct_queue.Queue, typing.Awaitable[gct_queue.Queue]], + ]: + raise NotImplementedError() + + @property + def delete_queue( + self, + ) -> typing.Callable[ + [cloudtasks.DeleteQueueRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def purge_queue( + self, + ) -> typing.Callable[ + [cloudtasks.PurgeQueueRequest], + typing.Union[queue.Queue, typing.Awaitable[queue.Queue]], + ]: + raise NotImplementedError() + + @property + def pause_queue( + self, + ) -> typing.Callable[ + [cloudtasks.PauseQueueRequest], + typing.Union[queue.Queue, typing.Awaitable[queue.Queue]], + ]: + raise NotImplementedError() + + @property + def resume_queue( + self, + ) -> typing.Callable[ + [cloudtasks.ResumeQueueRequest], + typing.Union[queue.Queue, typing.Awaitable[queue.Queue]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.GetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def 
set_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.SetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> typing.Callable[ + [iam_policy.TestIamPermissionsRequest], + typing.Union[ + iam_policy.TestIamPermissionsResponse, + typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_tasks( + self, + ) -> typing.Callable[ + [cloudtasks.ListTasksRequest], + typing.Union[ + cloudtasks.ListTasksResponse, typing.Awaitable[cloudtasks.ListTasksResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_task( + self, + ) -> typing.Callable[ + [cloudtasks.GetTaskRequest], + typing.Union[task.Task, typing.Awaitable[task.Task]], + ]: + raise NotImplementedError() + + @property + def create_task( + self, + ) -> typing.Callable[ + [cloudtasks.CreateTaskRequest], + typing.Union[gct_task.Task, typing.Awaitable[gct_task.Task]], + ]: + raise NotImplementedError() + + @property + def delete_task( + self, + ) -> typing.Callable[ + [cloudtasks.DeleteTaskRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def lease_tasks( + self, + ) -> typing.Callable[ + [cloudtasks.LeaseTasksRequest], + typing.Union[ + cloudtasks.LeaseTasksResponse, + typing.Awaitable[cloudtasks.LeaseTasksResponse], + ], + ]: + raise NotImplementedError() + + @property + def acknowledge_task( + self, + ) -> typing.Callable[ + [cloudtasks.AcknowledgeTaskRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def renew_lease( + self, + ) -> typing.Callable[ + [cloudtasks.RenewLeaseRequest], + typing.Union[task.Task, typing.Awaitable[task.Task]], + ]: + raise NotImplementedError() + + @property + def cancel_lease( + self, + ) -> typing.Callable[ + [cloudtasks.CancelLeaseRequest], + typing.Union[task.Task, typing.Awaitable[task.Task]], + ]: + raise NotImplementedError() + + @property + def run_task( + self, + ) -> typing.Callable[ + [cloudtasks.RunTaskRequest], + typing.Union[task.Task, typing.Awaitable[task.Task]], + ]: + raise NotImplementedError() + + +__all__ = ("CloudTasksTransport",) diff --git a/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py new file mode 100644 index 00000000..224f657c --- /dev/null +++ b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py @@ -0,0 +1,914 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
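(Editorial aside: a minimal sketch of the mutual-TLS path taken by the constructor defined below when ``api_mtls_endpoint`` is set. The endpoint string and the certificate loader are placeholders for illustration only.)

    from google.cloud.tasks_v2beta2.services.cloud_tasks.transports import (
        CloudTasksGrpcTransport,
    )

    def load_client_cert():
        # Placeholder loader: must return (certificate_chain, private_key)
        # as PEM-encoded bytes, matching the client_cert_source contract.
        with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
            return cert.read(), key.read()

    # With api_mtls_endpoint set, the transport builds a mutual-TLS channel
    # from the callback's certificate (or application default SSL credentials
    # when no callback is given); auth credentials come from the environment.
    transport = CloudTasksGrpcTransport(
        api_mtls_endpoint="cloudtasks.mtls.googleapis.com",  # placeholder
        client_cert_source=load_client_cert,
    )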
+# + +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + + +import grpc # type: ignore + +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO + + +class CloudTasksGrpcTransport(CloudTasksTransport): + """gRPC backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} # type: Dict[str, Callable] + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + @classmethod + def create_channel( + cls, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + address (Optionsl[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. 
+ if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def list_queues( + self, + ) -> Callable[[cloudtasks.ListQueuesRequest], cloudtasks.ListQueuesResponse]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + ~.ListQueuesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_queues" not in self._stubs: + self._stubs["list_queues"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/ListQueues", + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs["list_queues"] + + @property + def get_queue(self) -> Callable[[cloudtasks.GetQueueRequest], queue.Queue]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_queue" not in self._stubs: + self._stubs["get_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/GetQueue", + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["get_queue"] + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], gct_queue.Queue]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_queue" not in self._stubs: + self._stubs["create_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue", + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["create_queue"] + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], gct_queue.Queue]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. 
+ + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_queue" not in self._stubs: + self._stubs["update_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue", + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["update_queue"] + + @property + def delete_queue(self) -> Callable[[cloudtasks.DeleteQueueRequest], empty.Empty]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_queue" not in self._stubs: + self._stubs["delete_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue", + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_queue"] + + @property + def purge_queue(self) -> Callable[[cloudtasks.PurgeQueueRequest], queue.Queue]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_queue" not in self._stubs: + self._stubs["purge_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue", + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["purge_queue"] + + @property + def pause_queue(self) -> Callable[[cloudtasks.PauseQueueRequest], queue.Queue]: + r"""Return a callable for the pause queue method over gRPC. 
+ + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_queue" not in self._stubs: + self._stubs["pause_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue", + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["pause_queue"] + + @property + def resume_queue(self) -> Callable[[cloudtasks.ResumeQueueRequest], queue.Queue]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta2.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_queue" not in self._stubs: + self._stubs["resume_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue", + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["resume_queue"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def list_tasks( + self, + ) -> Callable[[cloudtasks.ListTasksRequest], cloudtasks.ListTasksResponse]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. 
+ + Returns: + Callable[[~.ListTasksRequest], + ~.ListTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tasks" not in self._stubs: + self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/ListTasks", + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs["list_tasks"] + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], task.Task]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_task" not in self._stubs: + self._stubs["get_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/GetTask", + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["get_task"] + + @property + def create_task(self) -> Callable[[cloudtasks.CreateTaskRequest], gct_task.Task]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the + maximum task size is 100KB. + - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the + maximum task size is 1MB. + + Returns: + Callable[[~.CreateTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_task" not in self._stubs: + self._stubs["create_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/CreateTask", + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs["create_task"] + + @property + def delete_task(self) -> Callable[[cloudtasks.DeleteTaskRequest], empty.Empty]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has completed + successfully or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_task" not in self._stubs: + self._stubs["delete_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask", + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_task"] + + @property + def lease_tasks( + self, + ) -> Callable[[cloudtasks.LeaseTasksRequest], cloudtasks.LeaseTasksResponse]: + r"""Return a callable for the lease tasks method over gRPC. + + Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + after they have performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is + intended to store data that the worker needs to perform the work + associated with the task. To return the payloads in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per queue. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is + returned when this limit is exceeded. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also + returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + + Returns: + Callable[[~.LeaseTasksRequest], + ~.LeaseTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "lease_tasks" not in self._stubs: + self._stubs["lease_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks", + request_serializer=cloudtasks.LeaseTasksRequest.serialize, + response_deserializer=cloudtasks.LeaseTasksResponse.deserialize, + ) + return self._stubs["lease_tasks"] + + @property + def acknowledge_task( + self, + ) -> Callable[[cloudtasks.AcknowledgeTaskRequest], empty.Empty]: + r"""Return a callable for the acknowledge task method over gRPC. + + Acknowledges a pull task. + + The worker, that is, the entity that + [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this + task must call this method to indicate that the work associated + with the task has finished. + + The worker must acknowledge a task within the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + or the lease will expire and the task will become available to + be leased again. After the task is acknowledged, it will not be + returned by a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Returns: + Callable[[~.AcknowledgeTaskRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "acknowledge_task" not in self._stubs: + self._stubs["acknowledge_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask", + request_serializer=cloudtasks.AcknowledgeTaskRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["acknowledge_task"] + + @property + def renew_lease(self) -> Callable[[cloudtasks.RenewLeaseRequest], task.Task]: + r"""Return a callable for the renew lease method over gRPC. + + Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be returned + in the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + Returns: + Callable[[~.RenewLeaseRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "renew_lease" not in self._stubs: + self._stubs["renew_lease"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/RenewLease", + request_serializer=cloudtasks.RenewLeaseRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["renew_lease"] + + @property + def cancel_lease(self) -> Callable[[cloudtasks.CancelLeaseRequest], task.Task]: + r"""Return a callable for the cancel lease method over gRPC. + + Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + to now. This will make the task available to be leased to the + next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + Returns: + Callable[[~.CancelLeaseRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_lease" not in self._stubs: + self._stubs["cancel_lease"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/CancelLease", + request_serializer=cloudtasks.CancelLeaseRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["cancel_lease"] + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], task.Task]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the + [status][google.cloud.tasks.v2beta2.Task.status] after the task + is dispatched but before the task is received by its target. 
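A rough sketch of the pull-queue flow exposed by the lease/acknowledge stubs above, going through the generated client; the project, location, and queue names are placeholders, and the print call stands in for real work:

    from google.cloud import tasks_v2beta2
    from google.protobuf import duration_pb2

    client = tasks_v2beta2.CloudTasksClient()
    parent = client.queue_path("my-project", "us-central1", "my-pull-queue")

    # Lease up to 10 tasks for 5 minutes, asking for the FULL view so the
    # pull message payloads are included in the response.
    lease = client.lease_tasks(
        request={
            "parent": parent,
            "max_tasks": 10,
            "lease_duration": duration_pb2.Duration(seconds=300),
            "response_view": tasks_v2beta2.Task.View.FULL,
        }
    )

    for task in lease.tasks:
        # Replace this with the real work the payload describes.
        print(task.pull_message.payload)
        # schedule_time doubles as the lease identifier, so it must be sent
        # back exactly as returned by LeaseTasks.
        client.acknowledge_task(
            request={"name": task.name, "schedule_time": task.schedule_time}
        )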
+ + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot + be called on a [pull + task][google.cloud.tasks.v2beta2.PullMessage]. + + Returns: + Callable[[~.RunTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_task" not in self._stubs: + self._stubs["run_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/RunTask", + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["run_task"] + + +__all__ = ("CloudTasksGrpcTransport",) diff --git a/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py new file mode 100644 index 00000000..62acdc4d --- /dev/null +++ b/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py @@ -0,0 +1,932 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .grpc import CloudTasksGrpcTransport + + +class CloudTasksGrpcAsyncIOTransport(CloudTasksTransport): + """gRPC AsyncIO backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. 
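A minimal sketch of how this asyncio transport is typically reached in application code, via CloudTasksAsyncClient inside an event loop; the project and location below are placeholders:

    import asyncio

    from google.cloud import tasks_v2beta2


    async def main():
        # The async client mirrors the sync surface but returns awaitables;
        # under the hood it talks to gRPC through an aio.Channel.
        client = tasks_v2beta2.CloudTasksAsyncClient()
        parent = "projects/my-project/locations/us-central1"

        # Awaiting list_queues yields an async pager that handles paging.
        pager = await client.list_queues(parent=parent)
        async for queue in pager:
            print(queue.name, queue.state)


    asyncio.run(main())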
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def list_queues( + self, + ) -> Callable[ + [cloudtasks.ListQueuesRequest], Awaitable[cloudtasks.ListQueuesResponse] + ]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + Awaitable[~.ListQueuesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_queues" not in self._stubs: + self._stubs["list_queues"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/ListQueues", + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs["list_queues"] + + @property + def get_queue( + self, + ) -> Callable[[cloudtasks.GetQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_queue" not in self._stubs: + self._stubs["get_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/GetQueue", + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["get_queue"] + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], Awaitable[gct_queue.Queue]]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_queue" not in self._stubs: + self._stubs["create_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue", + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["create_queue"] + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], Awaitable[gct_queue.Queue]]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_queue" not in self._stubs: + self._stubs["update_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue", + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["update_queue"] + + @property + def delete_queue( + self, + ) -> Callable[[cloudtasks.DeleteQueueRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_queue" not in self._stubs: + self._stubs["delete_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue", + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_queue"] + + @property + def purge_queue( + self, + ) -> Callable[[cloudtasks.PurgeQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_queue" not in self._stubs: + self._stubs["purge_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue", + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["purge_queue"] + + @property + def pause_queue( + self, + ) -> Callable[[cloudtasks.PauseQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_queue" not in self._stubs: + self._stubs["pause_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue", + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["pause_queue"] + + @property + def resume_queue( + self, + ) -> Callable[[cloudtasks.ResumeQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta2.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_queue" not in self._stubs: + self._stubs["resume_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue", + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["resume_queue"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. 
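One possible shape for the gradual ramp-up that the resume_queue warning above asks for: resume queues in small waves with a pause between them. The batch size, delay, and queue names below are illustrative only; the authoritative guidance is the 500/50/5 pattern in the linked document:

    import time

    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()

    # Hypothetical list of fully qualified names of paused queues.
    paused_queues = [
        f"projects/my-project/locations/us-central1/queues/queue-{i}"
        for i in range(40)
    ]

    BATCH_SIZE = 5
    DELAY_SECONDS = 300  # wait five minutes between waves

    # Resuming in waves lets dispatch traffic build up gradually instead of
    # hitting every target at full rate at the same moment.
    for start in range(0, len(paused_queues), BATCH_SIZE):
        for name in paused_queues[start : start + BATCH_SIZE]:
            client.resume_queue(name=name)
        time.sleep(DELAY_SECONDS)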
+ + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], + Awaitable[iam_policy.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def list_tasks( + self, + ) -> Callable[ + [cloudtasks.ListTasksRequest], Awaitable[cloudtasks.ListTasksResponse] + ]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + Awaitable[~.ListTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_tasks" not in self._stubs: + self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/ListTasks", + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs["list_tasks"] + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], Awaitable[task.Task]]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_task" not in self._stubs: + self._stubs["get_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/GetTask", + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["get_task"] + + @property + def create_task( + self, + ) -> Callable[[cloudtasks.CreateTaskRequest], Awaitable[gct_task.Task]]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the + maximum task size is 100KB. + - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the + maximum task size is 1MB. + + Returns: + Callable[[~.CreateTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_task" not in self._stubs: + self._stubs["create_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/CreateTask", + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs["create_task"] + + @property + def delete_task( + self, + ) -> Callable[[cloudtasks.DeleteTaskRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has completed + successfully or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_task" not in self._stubs: + self._stubs["delete_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask", + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_task"] + + @property + def lease_tasks( + self, + ) -> Callable[ + [cloudtasks.LeaseTasksRequest], Awaitable[cloudtasks.LeaseTasksResponse] + ]: + r"""Return a callable for the lease tasks method over gRPC. 
+ + Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + after they have performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is + intended to store data that the worker needs to perform the work + associated with the task. To return the payloads in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per queue. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is + returned when this limit is exceeded. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also + returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + + Returns: + Callable[[~.LeaseTasksRequest], + Awaitable[~.LeaseTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "lease_tasks" not in self._stubs: + self._stubs["lease_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks", + request_serializer=cloudtasks.LeaseTasksRequest.serialize, + response_deserializer=cloudtasks.LeaseTasksResponse.deserialize, + ) + return self._stubs["lease_tasks"] + + @property + def acknowledge_task( + self, + ) -> Callable[[cloudtasks.AcknowledgeTaskRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the acknowledge task method over gRPC. + + Acknowledges a pull task. + + The worker, that is, the entity that + [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this + task must call this method to indicate that the work associated + with the task has finished. + + The worker must acknowledge a task within the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + or the lease will expire and the task will become available to + be leased again. After the task is acknowledged, it will not be + returned by a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Returns: + Callable[[~.AcknowledgeTaskRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "acknowledge_task" not in self._stubs: + self._stubs["acknowledge_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask", + request_serializer=cloudtasks.AcknowledgeTaskRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["acknowledge_task"] + + @property + def renew_lease( + self, + ) -> Callable[[cloudtasks.RenewLeaseRequest], Awaitable[task.Task]]: + r"""Return a callable for the renew lease method over gRPC. + + Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be returned + in the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + Returns: + Callable[[~.RenewLeaseRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "renew_lease" not in self._stubs: + self._stubs["renew_lease"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/RenewLease", + request_serializer=cloudtasks.RenewLeaseRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["renew_lease"] + + @property + def cancel_lease( + self, + ) -> Callable[[cloudtasks.CancelLeaseRequest], Awaitable[task.Task]]: + r"""Return a callable for the cancel lease method over gRPC. + + Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + to now. This will make the task available to be leased to the + next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + Returns: + Callable[[~.CancelLeaseRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_lease" not in self._stubs: + self._stubs["cancel_lease"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/CancelLease", + request_serializer=cloudtasks.CancelLeaseRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["cancel_lease"] + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], Awaitable[task.Task]]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the + [status][google.cloud.tasks.v2beta2.Task.status] after the task + is dispatched but before the task is received by its target. 
+ + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot + be called on a [pull + task][google.cloud.tasks.v2beta2.PullMessage]. + + Returns: + Callable[[~.RunTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_task" not in self._stubs: + self._stubs["run_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/RunTask", + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["run_task"] + + +__all__ = ("CloudTasksGrpcAsyncIOTransport",) diff --git a/google/cloud/tasks_v2beta2/types.py b/google/cloud/tasks_v2beta2/types.py deleted file mode 100644 index 6d6a6d6e..00000000 --- a/google/cloud/tasks_v2beta2/types.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2 -from google.cloud.tasks_v2beta2.proto import queue_pb2 -from google.cloud.tasks_v2beta2.proto import target_pb2 -from google.cloud.tasks_v2beta2.proto import task_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import options_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import any_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 -from google.rpc import status_pb2 -from google.type import expr_pb2 - - -_shared_modules = [ - iam_policy_pb2, - options_pb2, - policy_pb2, - any_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, - expr_pb2, -] - -_local_modules = [ - cloudtasks_pb2, - queue_pb2, - target_pb2, - task_pb2, -] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.tasks_v2beta2.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - -__all__ = tuple(sorted(names)) diff --git a/google/cloud/tasks_v2beta2/types/__init__.py b/google/cloud/tasks_v2beta2/types/__init__.py new file mode 100644 index 00000000..6d01818a --- /dev/null +++ b/google/cloud/tasks_v2beta2/types/__init__.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
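The module removed above populated its namespace at import time from the ``*_pb2`` modules; the ``types`` package introduced below defines proto-plus messages under the same import path. A hedged sketch of what stays the same and what changes (the queue name is a placeholder):

    from google.cloud.tasks_v2beta2 import types

    # Same import path as before, but Queue is now a proto-plus wrapper
    # rather than a re-exported protobuf class.
    queue = types.Queue(name="projects/my-project/locations/us-central1/queues/q1")

    # Wire-format round trips use the class-level helpers that the new
    # transports also rely on (request_serializer / response_deserializer).
    data = types.Queue.serialize(queue)
    restored = types.Queue.deserialize(data)
    assert restored.name == queue.name

    # The underlying protobuf message is still reachable when an API
    # needs the raw pb2 object.
    raw_pb2 = types.Queue.pb(queue)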
+# + +from .target import ( + PullTarget, + PullMessage, + AppEngineHttpTarget, + AppEngineHttpRequest, + AppEngineRouting, +) +from .queue import ( + Queue, + RateLimits, + RetryConfig, +) +from .task import ( + Task, + TaskStatus, + AttemptStatus, +) +from .cloudtasks import ( + ListQueuesRequest, + ListQueuesResponse, + GetQueueRequest, + CreateQueueRequest, + UpdateQueueRequest, + DeleteQueueRequest, + PurgeQueueRequest, + PauseQueueRequest, + ResumeQueueRequest, + ListTasksRequest, + ListTasksResponse, + GetTaskRequest, + CreateTaskRequest, + DeleteTaskRequest, + LeaseTasksRequest, + LeaseTasksResponse, + AcknowledgeTaskRequest, + RenewLeaseRequest, + CancelLeaseRequest, + RunTaskRequest, +) + + +__all__ = ( + "PullTarget", + "PullMessage", + "AppEngineHttpTarget", + "AppEngineHttpRequest", + "AppEngineRouting", + "Queue", + "RateLimits", + "RetryConfig", + "Task", + "TaskStatus", + "AttemptStatus", + "ListQueuesRequest", + "ListQueuesResponse", + "GetQueueRequest", + "CreateQueueRequest", + "UpdateQueueRequest", + "DeleteQueueRequest", + "PurgeQueueRequest", + "PauseQueueRequest", + "ResumeQueueRequest", + "ListTasksRequest", + "ListTasksResponse", + "GetTaskRequest", + "CreateTaskRequest", + "DeleteTaskRequest", + "LeaseTasksRequest", + "LeaseTasksResponse", + "AcknowledgeTaskRequest", + "RenewLeaseRequest", + "CancelLeaseRequest", + "RunTaskRequest", +) diff --git a/google/cloud/tasks_v2beta2/types/cloudtasks.py b/google/cloud/tasks_v2beta2/types/cloudtasks.py new file mode 100644 index 00000000..7edb20d1 --- /dev/null +++ b/google/cloud/tasks_v2beta2/types/cloudtasks.py @@ -0,0 +1,725 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta2", + manifest={ + "ListQueuesRequest", + "ListQueuesResponse", + "GetQueueRequest", + "CreateQueueRequest", + "UpdateQueueRequest", + "DeleteQueueRequest", + "PurgeQueueRequest", + "PauseQueueRequest", + "ResumeQueueRequest", + "ListTasksRequest", + "ListTasksResponse", + "GetTaskRequest", + "CreateTaskRequest", + "DeleteTaskRequest", + "LeaseTasksRequest", + "LeaseTasksResponse", + "AcknowledgeTaskRequest", + "RenewLeaseRequest", + "CancelLeaseRequest", + "RunTaskRequest", + }, +) + + +class ListQueuesRequest(proto.Message): + r"""Request message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + + Attributes: + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + filter (str): + ``filter`` can be used to specify a subset of queues. 
Any + [Queue][google.cloud.tasks.v2beta2.Queue] field can be used + as a filter and several operators are supported. For example: + ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as + described in `Stackdriver's Advanced Logs + Filters `__. + + Sample filter "app_engine_http_target: \*". + + Note that using filters might cause fewer queues than the + requested page_size to be returned. + page_size (int): + Requested page size. + + The maximum page size is 9800. If unspecified, the page size + will be the maximum. Fewer queues than requested might be + returned, even if more queues exist; use the + [next_page_token][google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token] + in the response to determine if more queues exist. + page_token (str): + A token identifying the page of results to return. + + To request the first page results, page_token must be empty. + To request the next page of results, page_token must be the + value of + [next_page_token][google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token] + returned from the previous call to + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues] + method. It is an error to switch the value of the + [filter][google.cloud.tasks.v2beta2.ListQueuesRequest.filter] + while iterating through pages. + """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListQueuesResponse(proto.Message): + r"""Response message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + + Attributes: + queues (Sequence[~.gct_queue.Queue]): + The list of queues. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues] + with this value as the + [page_token][google.cloud.tasks.v2beta2.ListQueuesRequest.page_token]. + + If the next_page_token is empty, there are no more results. + + The page token is valid for only 2 hours. + """ + + @property + def raw_page(self): + return self + + queues = proto.RepeatedField(proto.MESSAGE, number=1, message=gct_queue.Queue,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetQueueRequest(proto.Message): + r"""Request message for + [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. + + Attributes: + name (str): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreateQueueRequest(proto.Message): + r"""Request message for + [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. + + Attributes: + parent (str): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + queue (~.gct_queue.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta2.Queue.name] cannot + be the same as an existing queue. + """ + + parent = proto.Field(proto.STRING, number=1) + + queue = proto.Field(proto.MESSAGE, number=2, message=gct_queue.Queue,) + + +class UpdateQueueRequest(proto.Message): + r"""Request message for + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue].
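For reference, a minimal sketch of building the ListQueuesRequest described above and iterating the results with the generated client; the project and location are placeholders, and the filter string is the sample from the docstring:

    from google.cloud import tasks_v2beta2
    from google.cloud.tasks_v2beta2.types import cloudtasks

    client = tasks_v2beta2.CloudTasksClient()

    request = cloudtasks.ListQueuesRequest(
        parent="projects/my-project/locations/us-central1",
        filter="app_engine_http_target: *",
        page_size=100,
    )

    # The client-level pager follows next_page_token automatically, so
    # page_token rarely needs to be set by hand.
    for queue in client.list_queues(request=request):
        print(queue.name)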
+ + Attributes: + queue (~.gct_queue.Queue): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2beta2.Queue.name] + must be specified. + + Output only fields cannot be modified using UpdateQueue. Any + value specified for an output only field will be ignored. + The queue's [name][google.cloud.tasks.v2beta2.Queue.name] + cannot be changed. + update_mask (~.field_mask.FieldMask): + A mask used to specify which fields of the + queue are being updated. + If empty, then all fields will be updated. + """ + + queue = proto.Field(proto.MESSAGE, number=1, message=gct_queue.Queue,) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class DeleteQueueRequest(proto.Message): + r"""Request message for + [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class PurgeQueueRequest(proto.Message): + r"""Request message for + [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class PauseQueueRequest(proto.Message): + r"""Request message for + [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class ResumeQueueRequest(proto.Message): + r"""Request message for + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListTasksRequest(proto.Message): + r"""Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta2.Task] resource. + page_size (int): + Maximum page size. + + Fewer tasks than requested might be returned, even if more + tasks exist; use + [next_page_token][google.cloud.tasks.v2beta2.ListTasksResponse.next_page_token] + in the response to determine if more tasks exist. + + The maximum page size is 1000. If unspecified, the page size + will be the maximum. + page_token (str): + A token identifying the page of results to return. + + To request the first page results, page_token must be empty. 
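A hedged sketch of how the queue/update_mask pair described above is typically populated for UpdateQueue; the queue name, rate limit value, and field path are illustrative:

    from google.cloud import tasks_v2beta2
    from google.protobuf import field_mask_pb2

    client = tasks_v2beta2.CloudTasksClient()

    queue = tasks_v2beta2.Queue(
        name="projects/my-project/locations/us-central1/queues/my-queue",
        rate_limits=tasks_v2beta2.RateLimits(max_tasks_dispatched_per_second=10),
    )

    # Only the fields named in update_mask are written; everything else on
    # the existing queue is left untouched.
    client.update_queue(
        request={
            "queue": queue,
            "update_mask": field_mask_pb2.FieldMask(
                paths=["rate_limits.max_tasks_dispatched_per_second"]
            ),
        }
    )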
+ To request the next page of results, page_token must be the + value of + [next_page_token][google.cloud.tasks.v2beta2.ListTasksResponse.next_page_token] + returned from the previous call to + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] + method. + + The page token is valid for only 2 hours. + """ + + parent = proto.Field(proto.STRING, number=1) + + response_view = proto.Field(proto.ENUM, number=2, enum=gct_task.Task.View,) + + page_size = proto.Field(proto.INT32, number=4) + + page_token = proto.Field(proto.STRING, number=5) + + +class ListTasksResponse(proto.Message): + r"""Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Attributes: + tasks (Sequence[~.gct_task.Task]): + The list of tasks. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] + with this value as the + [page_token][google.cloud.tasks.v2beta2.ListTasksRequest.page_token]. + + If the next_page_token is empty, there are no more results. + """ + + @property + def raw_page(self): + return self + + tasks = proto.RepeatedField(proto.MESSAGE, number=1, message=gct_task.Task,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetTaskRequest(proto.Message): + r"""Request message for getting a task using + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta2.Task] resource. + """ + + name = proto.Field(proto.STRING, number=1) + + response_view = proto.Field(proto.ENUM, number=2, enum=gct_task.Task.View,) + + +class CreateTaskRequest(proto.Message): + r"""Request message for + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + task (~.gct_task.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta2.Task.name]. If a name is + not specified then the system will generate a random unique + task id, which will be set in the task returned in the + [response][google.cloud.tasks.v2beta2.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set it to + the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task de-duplication. 
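Since naming a task explicitly is what turns on de-duplication, here is a short sketch of a CreateTask call that supplies a hashed task ID and treats ALREADY_EXISTS as "already enqueued"; the resource names and handler URL are placeholders::

    import hashlib

    from google.api_core.exceptions import AlreadyExists
    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1/queues/my-queue"

    # Hashed IDs keep the ID space roughly uniform, which the docstring
    # below recommends over sequential or timestamp-based IDs.
    task_id = hashlib.sha256(b"order-12345").hexdigest()
    task = tasks_v2beta2.Task(
        name=f"{parent}/tasks/{task_id}",
        app_engine_http_request=tasks_v2beta2.AppEngineHttpRequest(
            relative_url="/process-order",  # hypothetical handler
        ),
    )
    try:
        client.create_task(
            request=tasks_v2beta2.CreateTaskRequest(parent=parent, task=task)
        )
    except AlreadyExists:
        # A duplicate ID inside the de-duplication window fails with
        # ALREADY_EXISTS, which is exactly the behavior being relied on.
        pass
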
+ If a task's ID is identical to that of an existing task or a + task that was deleted or completed recently then the call + will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour after + the original task was deleted or completed. If the task's + queue was created using queue.yaml or queue.xml, then + another task with the same name can't be created for ~9days + after the original task was deleted or completed. + + Because there is an extra lookup cost to identify duplicate + task names, these + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id is + recommended. Choosing task ids that are sequential or have + sequential prefixes, for example using a timestamp, causes + an increase in latency and error rates in all task commands. + The infrastructure relies on an approximately uniform + distribution of task ids to store and serve tasks + efficiently. + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta2.Task] resource. + """ + + parent = proto.Field(proto.STRING, number=1) + + task = proto.Field(proto.MESSAGE, number=2, message=gct_task.Task,) + + response_view = proto.Field(proto.ENUM, number=3, enum=gct_task.Task.View,) + + +class DeleteTaskRequest(proto.Message): + r"""Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class LeaseTasksRequest(proto.Message): + r"""Request message for leasing tasks using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + max_tasks (int): + The maximum number of tasks to lease. + + The system will make a best effort to return as close to as + ``max_tasks`` as possible. + + The largest that ``max_tasks`` can be is 1000. + + The maximum total size of a [lease tasks + response][google.cloud.tasks.v2beta2.LeaseTasksResponse] is + 32 MB. If the sum of all task sizes requested reaches this + limit, fewer tasks than requested are returned. + lease_duration (~.duration.Duration): + Required. The duration of the lease. + + Each task returned in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] + will have its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + set to the current time plus the ``lease_duration``. 
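A sketch of the lease call itself, assuming a pull queue and placeholder names; the FULL view is requested so the leased payloads are populated::

    from google.cloud import tasks_v2beta2
    from google.protobuf import duration_pb2

    client = tasks_v2beta2.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1/queues/my-pull-queue"

    response = client.lease_tasks(
        request=tasks_v2beta2.LeaseTasksRequest(
            parent=parent,
            max_tasks=10,  # at most 1000 per the docstring
            lease_duration=duration_pb2.Duration(seconds=600),
            response_view=tasks_v2beta2.Task.View.FULL,  # needed to read payloads
        )
    )
    for task in response.tasks:
        print(task.name, task.pull_message.payload)
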
The + task is leased until its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]; + thus, the task will not be returned to another + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + call before its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + After the worker has successfully finished the work + associated with the task, the worker must call via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + before the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + Otherwise the task will be returned to a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + call so that another worker can retry it. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta2.Task] resource. + filter (str): + ``filter`` can be used to specify a subset of tasks to + lease. + + When ``filter`` is set to ``tag=`` then the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] + will contain only tasks whose + [tag][google.cloud.tasks.v2beta2.PullMessage.tag] is equal + to ````. ```` must be less than 500 + characters. + + When ``filter`` is set to ``tag_function=oldest_tag()``, + only tasks which have the same tag as the task with the + oldest + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be returned. + + Grammar Syntax: + + - ``filter = "tag=" tag | "tag_function=" function`` + + - ``tag = string`` + + - ``function = "oldest_tag()"`` + + The ``oldest_tag()`` function returns tasks which have the + same tag as the oldest task (ordered by schedule time). + + SDK compatibility: Although the SDK allows tags to be either + string or + `bytes `__, + only UTF-8 encoded tags can be used in Cloud Tasks. Tag + which aren't UTF-8 encoded can't be used in the + [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter] + and the task's + [tag][google.cloud.tasks.v2beta2.PullMessage.tag] will be + displayed as empty in Cloud Tasks. + """ + + parent = proto.Field(proto.STRING, number=1) + + max_tasks = proto.Field(proto.INT32, number=2) + + lease_duration = proto.Field(proto.MESSAGE, number=3, message=duration.Duration,) + + response_view = proto.Field(proto.ENUM, number=4, enum=gct_task.Task.View,) + + filter = proto.Field(proto.STRING, number=5) + + +class LeaseTasksResponse(proto.Message): + r"""Response message for leasing tasks using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + Attributes: + tasks (Sequence[~.gct_task.Task]): + The leased tasks. + """ + + tasks = proto.RepeatedField(proto.MESSAGE, number=1, message=gct_task.Task,) + + +class AcknowledgeTaskRequest(proto.Message): + r"""Request message for acknowledging a task using + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. + + Attributes: + name (str): + Required. The task name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + schedule_time (~.timestamp.Timestamp): + Required. The task's current schedule time, available in the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + """ + + name = proto.Field(proto.STRING, number=1) + + schedule_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + +class RenewLeaseRequest(proto.Message): + r"""Request message for renewing a lease using + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + schedule_time (~.timestamp.Timestamp): + Required. The task's current schedule time, available in the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + lease_duration (~.duration.Duration): + Required. The desired new lease duration, starting from now. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta2.Task] resource. + """ + + name = proto.Field(proto.STRING, number=1) + + schedule_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + lease_duration = proto.Field(proto.MESSAGE, number=3, message=duration.Duration,) + + response_view = proto.Field(proto.ENUM, number=4, enum=gct_task.Task.View,) + + +class CancelLeaseRequest(proto.Message): + r"""Request message for canceling a lease using + [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + schedule_time (~.timestamp.Timestamp): + Required. The task's current schedule time, available in the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. 
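Putting the lease messages together, a hedged worker sketch: lease, optionally renew before the lease expires, then acknowledge with the schedule_time from the most recent lease or renew response. Resource names are placeholders and the real work is elided::

    from google.cloud import tasks_v2beta2
    from google.protobuf import duration_pb2

    client = tasks_v2beta2.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1/queues/my-pull-queue"

    lease = client.lease_tasks(
        request=tasks_v2beta2.LeaseTasksRequest(
            parent=parent,
            max_tasks=5,
            lease_duration=duration_pb2.Duration(seconds=300),
            response_view=tasks_v2beta2.Task.View.FULL,
        )
    )
    for task in lease.tasks:
        # Buy more time if needed; the returned task carries the new
        # schedule_time, which is what proves the worker holds the lease.
        task = client.renew_lease(
            request=tasks_v2beta2.RenewLeaseRequest(
                name=task.name,
                schedule_time=task.schedule_time,
                lease_duration=duration_pb2.Duration(seconds=600),
            )
        )

        # ... do the actual work with task.pull_message here ...

        # Acknowledge before schedule_time, otherwise the task goes back
        # to a later LeaseTasks call.
        client.acknowledge_task(
            request=tasks_v2beta2.AcknowledgeTaskRequest(
                name=task.name, schedule_time=task.schedule_time
            )
        )
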
+ + By default response_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta2.Task] resource. + """ + + name = proto.Field(proto.STRING, number=1) + + schedule_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + response_view = proto.Field(proto.ENUM, number=3, enum=gct_task.Task.View,) + + +class RunTaskRequest(proto.Message): + r"""Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta2.Task] resource. + """ + + name = proto.Field(proto.STRING, number=1) + + response_view = proto.Field(proto.ENUM, number=2, enum=gct_task.Task.View,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/tasks_v2beta2/types/queue.py b/google/cloud/tasks_v2beta2/types/queue.py new file mode 100644 index 00000000..2d43a321 --- /dev/null +++ b/google/cloud/tasks_v2beta2/types/queue.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.tasks_v2beta2.types import target +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta2", + manifest={"Queue", "RateLimits", "RetryConfig",}, +) + + +class Queue(proto.Message): + r"""A queue is a container of related tasks. Queues are + configured to manage how those tasks are dispatched. + Configurable properties include rate limits, retry options, + target types, and others. + + Attributes: + name (str): + Caller-specified and required in + [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue], + after which it becomes output only. + + The queue name. 
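Stepping back to the RunTask message that closes cloudtasks.py above: a debugging sketch that forces a single task to dispatch immediately, regardless of its schedule_time or the queue's rate limits, using a placeholder task name::

    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()
    task_name = (
        "projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task"
    )

    task = client.run_task(
        request=tasks_v2beta2.RunTaskRequest(
            name=task_name,
            response_view=tasks_v2beta2.Task.View.BASIC,
        )
    )
    print(task.status.attempt_dispatch_count)
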
+ + The queue name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the queue's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + app_engine_http_target (~.target.AppEngineHttpTarget): + App Engine HTTP target. + + An App Engine queue is a queue that has an + [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget]. + pull_target (~.target.PullTarget): + Pull target. + + A pull queue is a queue that has a + [PullTarget][google.cloud.tasks.v2beta2.PullTarget]. + rate_limits (~.queue.RateLimits): + Rate limits for task dispatches. + + [rate_limits][google.cloud.tasks.v2beta2.Queue.rate_limits] + and + [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] + are related because they both control task attempts however + they control how tasks are attempted in different ways: + + - [rate_limits][google.cloud.tasks.v2beta2.Queue.rate_limits] + controls the total rate of dispatches from a queue (i.e. + all traffic dispatched from the queue, regardless of + whether the dispatch is from a first attempt or a retry). + - [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] + controls what happens to particular a task after its + first attempt fails. That is, + [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] + controls task retries (the second attempt, third attempt, + etc). + retry_config (~.queue.RetryConfig): + Settings that determine the retry behavior. + + - For tasks created using Cloud Tasks: the queue-level + retry settings apply to all tasks in the queue that were + created using Cloud Tasks. Retry settings cannot be set + on individual tasks. + - For tasks created using the App Engine SDK: the + queue-level retry settings apply to all tasks in the + queue which do not have retry settings explicitly set on + the task and were created by the App Engine SDK. See `App + Engine + documentation `__. + state (~.queue.Queue.State): + Output only. The state of the queue. + + ``state`` can only be changed by called + [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue], + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue], + or uploading + `queue.yaml/xml `__. + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] + cannot be used to change ``state``. + purge_time (~.timestamp.Timestamp): + Output only. The last time this queue was purged. + + All tasks that were + [created][google.cloud.tasks.v2beta2.Task.create_time] + before this time were purged. + + A queue can be purged using + [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue], + the `App Engine Task Queue SDK, or the Cloud + Console `__. + + Purge time will be truncated to the nearest microsecond. + Purge time will be unset if the queue has never been purged. 
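Because target_type is a oneof (see the field definitions that follow), a queue is either an App Engine HTTP queue or a pull queue, never both. A sketch of creating one of each, with placeholder IDs::

    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1"

    pull_queue = tasks_v2beta2.Queue(
        name=f"{parent}/queues/my-pull-queue",
        pull_target=tasks_v2beta2.PullTarget(),  # workers lease tasks from this queue
    )
    push_queue = tasks_v2beta2.Queue(
        name=f"{parent}/queues/my-push-queue",
        app_engine_http_target=tasks_v2beta2.AppEngineHttpTarget(),  # App Engine dispatch
    )
    for queue in (pull_queue, push_queue):
        client.create_queue(
            request=tasks_v2beta2.CreateQueueRequest(parent=parent, queue=queue)
        )
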
+ """ + + class State(proto.Enum): + r"""State of the queue.""" + STATE_UNSPECIFIED = 0 + RUNNING = 1 + PAUSED = 2 + DISABLED = 3 + + name = proto.Field(proto.STRING, number=1) + + app_engine_http_target = proto.Field( + proto.MESSAGE, + number=3, + oneof="target_type", + message=target.AppEngineHttpTarget, + ) + + pull_target = proto.Field( + proto.MESSAGE, number=4, oneof="target_type", message=target.PullTarget, + ) + + rate_limits = proto.Field(proto.MESSAGE, number=5, message="RateLimits",) + + retry_config = proto.Field(proto.MESSAGE, number=6, message="RetryConfig",) + + state = proto.Field(proto.ENUM, number=7, enum=State,) + + purge_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) + + +class RateLimits(proto.Message): + r"""Rate limits. + + This message determines the maximum rate that tasks can be + dispatched by a queue, regardless of whether the dispatch is a first + task attempt or a retry. + + Note: The debugging command, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask], will run a + task even if the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits]. + + Attributes: + max_tasks_dispatched_per_second (float): + The maximum rate at which tasks are dispatched from this + queue. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], + the maximum allowed value is 500. + - This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget]. In + addition to the ``max_tasks_dispatched_per_second`` + limit, a maximum of 10 QPS of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per pull queue. + + This field has the same meaning as `rate in + queue.yaml/xml `__. + max_burst_size (int): + Output only. The max burst size. + + Max burst size limits how fast tasks in queue are processed + when many tasks are in the queue and the rate is high. This + field allows the queue to have a high rate so processing + starts shortly after a task is enqueued, but still limits + resource usage when many tasks are enqueued in a short + period of time. + + The `token + bucket `__ + algorithm is used to control the rate of task dispatches. + Each queue has a token bucket that holds tokens, up to the + maximum specified by ``max_burst_size``. Each time a task is + dispatched, a token is removed from the bucket. Tasks will + be dispatched until the queue's bucket runs out of tokens. + The bucket will be continuously refilled with new tokens + based on + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second]. + + Cloud Tasks will pick the value of ``max_burst_size`` based + on the value of + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second]. + + For App Engine queues that were created or updated using + ``queue.yaml/xml``, ``max_burst_size`` is equal to + `bucket_size `__. + Since ``max_burst_size`` is output only, if + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] + is called on a queue created by ``queue.yaml/xml``, + ``max_burst_size`` will be reset based on the value of + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second], + regardless of whether + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is updated. 
+ max_concurrent_tasks (int): + The maximum number of concurrent tasks that Cloud Tasks + allows to be dispatched for this queue. After this threshold + has been reached, Cloud Tasks stops dispatching tasks until + the number of concurrent requests decreases. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + The maximum allowed value is 5,000. + + This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget] and always + -1, which indicates no limit. No other queue types can have + ``max_concurrent_tasks`` set to -1. + + This field has the same meaning as `max_concurrent_requests + in + queue.yaml/xml `__. + """ + + max_tasks_dispatched_per_second = proto.Field(proto.DOUBLE, number=1) + + max_burst_size = proto.Field(proto.INT32, number=2) + + max_concurrent_tasks = proto.Field(proto.INT32, number=3) + + +class RetryConfig(proto.Message): + r"""Retry config. + These settings determine how a failed task attempt is retried. + + Attributes: + max_attempts (int): + The maximum number of attempts for a task. + + Cloud Tasks will attempt the task ``max_attempts`` times + (that is, if the first attempt fails, then there will be + ``max_attempts - 1`` retries). Must be > 0. + unlimited_attempts (bool): + If true, then the number of attempts is + unlimited. + max_retry_duration (~.duration.Duration): + If positive, ``max_retry_duration`` specifies the time limit + for retrying a failed task, measured from when the task was + first attempted. Once ``max_retry_duration`` time has passed + *and* the task has been attempted + [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts] + times, no further attempts will be made and the task will be + deleted. + + If zero, then the task age is unlimited. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget]. + + ``max_retry_duration`` will be truncated to the nearest + second. + + This field has the same meaning as `task_age_limit in + queue.yaml/xml `__. + min_backoff (~.duration.Duration): + A task will be + [scheduled][google.cloud.tasks.v2beta2.Task.schedule_time] + for retry between + [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig] + specifies that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget]. + + ``min_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `min_backoff_seconds in + queue.yaml/xml `__. + max_backoff (~.duration.Duration): + A task will be + [scheduled][google.cloud.tasks.v2beta2.Task.schedule_time] + for retry between + [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig] + specifies that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget]. + + ``max_backoff`` will be truncated to the nearest second. 
+ + This field has the same meaning as `max_backoff_seconds in + queue.yaml/xml `__. + max_doublings (int): + The time between retries will double ``max_doublings`` + times. + + A task's retry interval starts at + [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff], + then doubles ``max_doublings`` times, then increases + linearly, and finally retries retries at intervals of + [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] + up to + [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts] + times. + + For example, if + [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff] + is 10s, + [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] + is 300s, and ``max_doublings`` is 3, then the a task will + first be retried in 10s. The retry interval will double + three times, and then increase linearly by 2^3 \* 10s. + Finally, the task will retry at intervals of + [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] + until the task has been attempted + [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts] + times. Thus, the requests will retry at 10s, 20s, 40s, 80s, + 160s, 240s, 300s, 300s, .... + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget]. + + This field has the same meaning as `max_doublings in + queue.yaml/xml `__. + """ + + max_attempts = proto.Field(proto.INT32, number=1, oneof="num_attempts") + + unlimited_attempts = proto.Field(proto.BOOL, number=2, oneof="num_attempts") + + max_retry_duration = proto.Field( + proto.MESSAGE, number=3, message=duration.Duration, + ) + + min_backoff = proto.Field(proto.MESSAGE, number=4, message=duration.Duration,) + + max_backoff = proto.Field(proto.MESSAGE, number=5, message=duration.Duration,) + + max_doublings = proto.Field(proto.INT32, number=6) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/tasks_v2beta2/types/target.py b/google/cloud/tasks_v2beta2/types/target.py new file mode 100644 index 00000000..d1d717e9 --- /dev/null +++ b/google/cloud/tasks_v2beta2/types/target.py @@ -0,0 +1,461 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta2", + manifest={ + "HttpMethod", + "PullTarget", + "PullMessage", + "AppEngineHttpTarget", + "AppEngineHttpRequest", + "AppEngineRouting", + }, +) + + +class HttpMethod(proto.Enum): + r"""The HTTP method used to execute the task.""" + HTTP_METHOD_UNSPECIFIED = 0 + POST = 1 + GET = 2 + HEAD = 3 + PUT = 4 + DELETE = 5 + + +class PullTarget(proto.Message): + r"""Pull target.""" + + +class PullMessage(proto.Message): + r"""The pull message contains data that can be used by the caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] to + process the task. 
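The max_doublings paragraph above works through a concrete schedule (10s, 20s, 40s, 80s, 160s, 240s, 300s, ...). A small helper that reproduces that arithmetic, as an illustration only::

    def retry_intervals(min_backoff, max_backoff, max_doublings, attempts):
        """Backoff schedule per the max_doublings docstring (illustration)."""
        intervals = []
        interval = min_backoff
        doublings = 0
        for _ in range(attempts):
            intervals.append(min(interval, max_backoff))
            if doublings < max_doublings:
                interval *= 2  # doubling phase
                doublings += 1
            else:
                # Linear phase: grow by min_backoff * 2**max_doublings per retry.
                interval += min_backoff * (2 ** max_doublings)
        return intervals

    # min_backoff=10s, max_backoff=300s, max_doublings=3, as in the docstring:
    assert retry_intervals(10, 300, 3, 8) == [10, 20, 40, 80, 160, 240, 300, 300]
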
+ + This proto can only be used for tasks in a queue which has + [pull_target][google.cloud.tasks.v2beta2.Queue.pull_target] set. + + Attributes: + payload (bytes): + A data payload consumed by the worker to + execute the task. + tag (str): + The task's tag. + + Tags allow similar tasks to be processed in a batch. If you + label tasks with a tag, your worker can [lease + tasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + with the same tag using + [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter]. + For example, if you want to aggregate the events associated + with a specific user once a day, you could tag tasks with + the user ID. + + The task's tag can only be set when the [task is + created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + + The tag must be less than 500 characters. + + SDK compatibility: Although the SDK allows tags to be either + string or + `bytes `__, + only UTF-8 encoded tags can be used in Cloud Tasks. If a tag + isn't UTF-8 encoded, the tag will be empty when the task is + returned by Cloud Tasks. + """ + + payload = proto.Field(proto.BYTES, number=1) + + tag = proto.Field(proto.STRING, number=2) + + +class AppEngineHttpTarget(proto.Message): + r"""App Engine HTTP target. + + The task will be delivered to the App Engine application hostname + specified by its + [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] + and + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest]. + The documentation for + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + explains how the task's host URL is constructed. + + Using + [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + Attributes: + app_engine_routing_override (~.target.AppEngineRouting): + Overrides for the [task-level + app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + + If set, ``app_engine_routing_override`` is used for all + tasks in the queue, no matter what the setting is for the + [task-level + app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + """ + + app_engine_routing_override = proto.Field( + proto.MESSAGE, number=1, message="AppEngineRouting", + ) + + +class AppEngineHttpRequest(proto.Message): + r"""App Engine HTTP request. + + The message defines the HTTP request that is sent to an App Engine + app when the task is dispatched. + + This proto can only be used for tasks in a queue which has + [app_engine_http_target][google.cloud.tasks.v2beta2.Queue.app_engine_http_target] + set. + + Using + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + The task will be delivered to the App Engine app which belongs to + the same project as the queue. For more information, see `How + Requests are + Routed `__ + and how routing is affected by `dispatch + files `__. + Traffic is encrypted during transport and never leaves Google + datacenters. Because this traffic is carried over a communication + mechanism internal to Google, you cannot explicitly set the protocol + (for example, HTTP or HTTPS). The request to the handler, however, + will appear to have used the HTTP protocol. 
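Tags tie PullMessage and the LeaseTasks filter together. A sketch of enqueueing a tagged pull task and then leasing only that tag, using placeholder names and the "tag=" grammar quoted earlier::

    from google.cloud import tasks_v2beta2
    from google.protobuf import duration_pb2

    client = tasks_v2beta2.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1/queues/my-pull-queue"

    client.create_task(
        request=tasks_v2beta2.CreateTaskRequest(
            parent=parent,
            task=tasks_v2beta2.Task(
                pull_message=tasks_v2beta2.PullMessage(
                    payload=b'{"event": "click"}',
                    tag="user-42",  # lets a worker batch related tasks
                )
            ),
        )
    )

    lease = client.lease_tasks(
        request=tasks_v2beta2.LeaseTasksRequest(
            parent=parent,
            max_tasks=100,
            lease_duration=duration_pb2.Duration(seconds=60),
            filter="tag=user-42",  # grammar: "tag=" followed by the tag string
        )
    )
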
+ + The [AppEngineRouting][google.cloud.tasks.v2beta2.AppEngineRouting] + used to construct the URL that the task is delivered to can be set + at the queue-level or task-level: + + - If set, + [app_engine_routing_override][google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_override] + is used for all tasks in the queue, no matter what the setting is + for the [task-level + app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + + The ``url`` that the task will be sent to is: + + - ``url =`` + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] ``+`` + [relative_url][google.cloud.tasks.v2beta2.AppEngineHttpRequest.relative_url] + + Tasks can be dispatched to secure app handlers, unsecure app + handlers, and URIs restricted with + ```login: admin`` `__. + Because tasks are not run as any user, they cannot be dispatched to + URIs restricted with + ```login: required`` `__ + Task dispatches also do not follow redirects. + + The task attempt has succeeded if the app's request handler returns + an HTTP response code in the range [``200`` - ``299``]. The task + attempt has failed if the app's handler returns a non-2xx response + code or Cloud Tasks does not receive response before the + [deadline][Task.dispatch_deadline]. Failed tasks will be retried + according to the [retry + configuration][google.cloud.tasks.v2beta2.Queue.retry_config]. + ``503`` (Service Unavailable) is considered an App Engine system + error instead of an application error and will cause Cloud Tasks' + traffic congestion control to temporarily throttle the queue's + dispatches. Unlike other types of task targets, a ``429`` (Too Many + Requests) response from an app handler does not cause traffic + congestion control to throttle the queue. + + Attributes: + http_method (~.target.HttpMethod): + The HTTP method to use for the request. The default is POST. + + The app's request handler for the task's target URL must be + able to handle HTTP requests with this http_method, + otherwise the task attempt will fail with error code 405 + (Method Not Allowed). See `Writing a push task request + handler `__ + and the documentation for the request handlers in the + language your app is written in e.g. `Python Request + Handler `__. + app_engine_routing (~.target.AppEngineRouting): + Task-level setting for App Engine routing. + + If set, + [app_engine_routing_override][google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_override] + is used for all tasks in the queue, no matter what the + setting is for the [task-level + app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + relative_url (str): + The relative URL. + The relative URL must begin with "/" and must be + a valid HTTP relative URL. It can contain a path + and query string arguments. If the relative URL + is empty, then the root path "/" will be used. + No spaces are allowed, and the maximum length + allowed is 2083 characters. + headers (Sequence[~.target.AppEngineHttpRequest.HeadersEntry]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + Repeated headers are not supported but a header value can + contain commas. + + Cloud Tasks sets some headers to default values: + + - ``User-Agent``: By default, this header is + ``"AppEngine-Google; (+http://code.google.com/appengine)"``. 
+ This header can be modified, but Cloud Tasks will append + ``"AppEngine-Google; (+http://code.google.com/appengine)"`` + to the modified ``User-Agent``. + + If the task has a + [payload][google.cloud.tasks.v2beta2.AppEngineHttpRequest.payload], + Cloud Tasks sets the following headers: + + - ``Content-Type``: By default, the ``Content-Type`` header + is set to ``"application/octet-stream"``. The default can + be overridden by explicitly setting ``Content-Type`` to a + particular media type when the [task is + created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/json"``. + - ``Content-Length``: This is computed by Cloud Tasks. This + value is output only. It cannot be changed. + + The headers below cannot be set or overridden: + + - ``Host`` + - ``X-Google-\*`` + - ``X-AppEngine-\*`` + + In addition, Cloud Tasks sets some headers when the task is + dispatched, such as headers containing information about the + task; see `request + headers `__. + These headers are set only when the task is dispatched, so + they are not visible when the task is returned in a Cloud + Tasks response. + + Although there is no specific limit for the maximum number + of headers or the size, there is a limit on the maximum size + of the [Task][google.cloud.tasks.v2beta2.Task]. For more + information, see the + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + documentation. + payload (bytes): + Payload. + + The payload will be sent as the HTTP message body. A message + body, and thus a payload, is allowed only if the HTTP method + is POST or PUT. It is an error to set a data payload on a + task with an incompatible + [HttpMethod][google.cloud.tasks.v2beta2.HttpMethod]. + """ + + http_method = proto.Field(proto.ENUM, number=1, enum="HttpMethod",) + + app_engine_routing = proto.Field( + proto.MESSAGE, number=2, message="AppEngineRouting", + ) + + relative_url = proto.Field(proto.STRING, number=3) + + headers = proto.MapField(proto.STRING, proto.STRING, number=4) + + payload = proto.Field(proto.BYTES, number=5) + + +class AppEngineRouting(proto.Message): + r"""App Engine Routing. + + Defines routing characteristics specific to App Engine - service, + version, and instance. + + For more information about services, versions, and instances see `An + Overview of App + Engine `__, + `Microservices Architecture on Google App + Engine `__, + `App Engine Standard request + routing `__, + and `App Engine Flex request + routing `__. + + Attributes: + service (str): + App service. + + By default, the task is sent to the service which is the + default service when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable into + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable, then + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + are the empty string. + version (str): + App version. 
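Pulling the AppEngineHttpRequest fields together, a sketch of an App Engine push task that overrides the method, headers, and routing; the handler path and service name are placeholders::

    import json

    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1/queues/my-push-queue"

    task = tasks_v2beta2.Task(
        app_engine_http_request=tasks_v2beta2.AppEngineHttpRequest(
            http_method=tasks_v2beta2.HttpMethod.POST,
            relative_url="/worker/handle",  # must start with "/"
            # Overrides the default application/octet-stream Content-Type.
            headers={"Content-Type": "application/json"},
            payload=json.dumps({"order_id": 12345}).encode("utf-8"),
            # Route to a specific service instead of the default one.
            app_engine_routing=tasks_v2beta2.AppEngineRouting(service="worker"),
        )
    )
    client.create_task(
        request=tasks_v2beta2.CreateTaskRequest(parent=parent, task=task)
    )
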
+ + By default, the task is sent to the version which is the + default version when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable into + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable, then + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + are the empty string. + instance (str): + App instance. + + By default, the task is sent to an instance which is + available when the task is attempted. + + Requests can only be sent to a specific instance if `manual + scaling is used in App Engine + Standard `__. + App Engine Flex does not support instances. For more + information, see `App Engine Standard request + routing `__ + and `App Engine Flex request + routing `__. + host (str): + Output only. The host that the task is sent to. + + For more information, see `How Requests are + Routed `__. + + The host is constructed as: + + - ``host = [application_domain_name]``\ + ``| [service] + '.' + [application_domain_name]``\ + ``| [version] + '.' + [application_domain_name]``\ + ``| [version_dot_service]+ '.' + [application_domain_name]``\ + ``| [instance] + '.' + [application_domain_name]``\ + ``| [instance_dot_service] + '.' + [application_domain_name]``\ + ``| [instance_dot_version] + '.' + [application_domain_name]``\ + ``| [instance_dot_version_dot_service] + '.' + [application_domain_name]`` + + - ``application_domain_name`` = The domain name of the app, + for example .appspot.com, which is associated with the + queue's project ID. Some tasks which were created using + the App Engine SDK use a custom domain name. + + - ``service =`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + + - ``version =`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + + - ``version_dot_service =`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + ``+ '.' +`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + + - ``instance =`` + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + + - ``instance_dot_service =`` + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + ``+ '.' +`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + + - ``instance_dot_version =`` + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + ``+ '.' +`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + + - ``instance_dot_version_dot_service =`` + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + ``+ '.' +`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + ``+ '.' +`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + + If + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + is empty, then the task will be sent to the service which is + the default service when the task is attempted. 
+ + If + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + is empty, then the task will be sent to the version which is + the default version when the task is attempted. + + If + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + is empty, then the task will be sent to an instance which is + available when the task is attempted. + + If + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + or + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + is invalid, then the task will be sent to the default + version of the default service when the task is attempted. + """ + + service = proto.Field(proto.STRING, number=1) + + version = proto.Field(proto.STRING, number=2) + + instance = proto.Field(proto.STRING, number=3) + + host = proto.Field(proto.STRING, number=4) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/tasks_v2beta2/types/task.py b/google/cloud/tasks_v2beta2/types/task.py new file mode 100644 index 00000000..ad7cd4e0 --- /dev/null +++ b/google/cloud/tasks_v2beta2/types/task.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.tasks_v2beta2.types import target +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as gr_status # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta2", + manifest={"Task", "TaskStatus", "AttemptStatus",}, +) + + +class Task(proto.Message): + r"""A unit of scheduled work. + + Attributes: + name (str): + Optionally caller-specified in + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + + The task name. + + The task name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the task's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + - ``TASK_ID`` can contain only letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), or underscores (_). The maximum + length is 500 characters. + app_engine_http_request (~.target.AppEngineHttpRequest): + App Engine HTTP request that is sent to the task's target. + Can be set only if + [app_engine_http_target][google.cloud.tasks.v2beta2.Queue.app_engine_http_target] + is set on the queue. + + An App Engine task is a task that has + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + set. 
+ pull_message (~.target.PullMessage): + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + to process the task. Can be set only if + [pull_target][google.cloud.tasks.v2beta2.Queue.pull_target] + is set on the queue. + + A pull task is a task that has + [PullMessage][google.cloud.tasks.v2beta2.PullMessage] set. + schedule_time (~.timestamp.Timestamp): + The time when the task is scheduled to be attempted. + + For App Engine queues, this is when the task will be + attempted or retried. + + For pull queues, this is the time when the task is available + to be leased; if a task is currently leased, this is the + time when the current lease expires, that is, the time that + the task was leased plus the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + ``schedule_time`` will be truncated to the nearest + microsecond. + create_time (~.timestamp.Timestamp): + Output only. The time that the task was created. + + ``create_time`` will be truncated to the nearest second. + status (~.task.TaskStatus): + Output only. The task status. + view (~.task.Task.View): + Output only. The view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] has been returned. + """ + + class View(proto.Enum): + r"""The view specifies a subset of + [Task][google.cloud.tasks.v2beta2.Task] data. + + When a task is returned in a response, not all information is + retrieved by default because some data, such as payloads, might be + desirable to return only when needed because of its large size or + because of the sensitivity of data that it contains. + """ + VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + name = proto.Field(proto.STRING, number=1) + + app_engine_http_request = proto.Field( + proto.MESSAGE, + number=3, + oneof="payload_type", + message=target.AppEngineHttpRequest, + ) + + pull_message = proto.Field( + proto.MESSAGE, number=4, oneof="payload_type", message=target.PullMessage, + ) + + schedule_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + + create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + + status = proto.Field(proto.MESSAGE, number=7, message="TaskStatus",) + + view = proto.Field(proto.ENUM, number=8, enum=View,) + + +class TaskStatus(proto.Message): + r"""Status of the task. + + Attributes: + attempt_dispatch_count (int): + Output only. The number of attempts + dispatched. + This count includes attempts which have been + dispatched but haven't received a response. + attempt_response_count (int): + Output only. The number of attempts which have received a + response. + + This field is not calculated for [pull + tasks][google.cloud.tasks.v2beta2.PullMessage]. + first_attempt_status (~.task.AttemptStatus): + Output only. The status of the task's first attempt. + + Only + [dispatch_time][google.cloud.tasks.v2beta2.AttemptStatus.dispatch_time] + will be set. The other + [AttemptStatus][google.cloud.tasks.v2beta2.AttemptStatus] + information is not retained by Cloud Tasks. + + This field is not calculated for [pull + tasks][google.cloud.tasks.v2beta2.PullMessage]. + last_attempt_status (~.task.AttemptStatus): + Output only. The status of the task's last attempt. + + This field is not calculated for [pull + tasks][google.cloud.tasks.v2beta2.PullMessage]. 
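A sketch of reading a task back with the FULL view (which needs the cloudtasks.tasks.fullView permission) and inspecting the output-only status counters, with a placeholder task name::

    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()
    task_name = (
        "projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task"
    )

    task = client.get_task(
        request=tasks_v2beta2.GetTaskRequest(
            name=task_name,
            response_view=tasks_v2beta2.Task.View.FULL,
        )
    )
    print(task.schedule_time, task.create_time)
    # Output-only counters; response counts are not kept for pull tasks.
    print(task.status.attempt_dispatch_count, task.status.attempt_response_count)
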
+ """ + + attempt_dispatch_count = proto.Field(proto.INT32, number=1) + + attempt_response_count = proto.Field(proto.INT32, number=2) + + first_attempt_status = proto.Field( + proto.MESSAGE, number=3, message="AttemptStatus", + ) + + last_attempt_status = proto.Field(proto.MESSAGE, number=4, message="AttemptStatus",) + + +class AttemptStatus(proto.Message): + r"""The status of a task attempt. + + Attributes: + schedule_time (~.timestamp.Timestamp): + Output only. The time that this attempt was scheduled. + + ``schedule_time`` will be truncated to the nearest + microsecond. + dispatch_time (~.timestamp.Timestamp): + Output only. The time that this attempt was dispatched. + + ``dispatch_time`` will be truncated to the nearest + microsecond. + response_time (~.timestamp.Timestamp): + Output only. The time that this attempt response was + received. + + ``response_time`` will be truncated to the nearest + microsecond. + response_status (~.gr_status.Status): + Output only. The response from the target for + this attempt. + If the task has not been attempted or the task + is currently running then the response status is + unset. + """ + + schedule_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + dispatch_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + response_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + + response_status = proto.Field(proto.MESSAGE, number=4, message=gr_status.Status,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/tasks_v2beta3/__init__.py b/google/cloud/tasks_v2beta3/__init__.py index 5096a5b8..9371d755 100644 --- a/google/cloud/tasks_v2beta3/__init__.py +++ b/google/cloud/tasks_v2beta3/__init__.py @@ -1,45 +1,79 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# - -from __future__ import absolute_import -import sys -import warnings - -from google.cloud.tasks_v2beta3 import types -from google.cloud.tasks_v2beta3.gapic import cloud_tasks_client -from google.cloud.tasks_v2beta3.gapic import enums - - -if sys.version_info[:2] == (2, 7): - message = ( - "A future version of this library will drop support for Python 2.7. 
" - "More details about Python 2 support for Google Cloud Client Libraries " - "can be found at https://cloud.google.com/python/docs/python2-sunset/" - ) - warnings.warn(message, DeprecationWarning) - - -class CloudTasksClient(cloud_tasks_client.CloudTasksClient): - __doc__ = cloud_tasks_client.CloudTasksClient.__doc__ - enums = enums +from .services.cloud_tasks import CloudTasksClient +from .types.cloudtasks import CreateQueueRequest +from .types.cloudtasks import CreateTaskRequest +from .types.cloudtasks import DeleteQueueRequest +from .types.cloudtasks import DeleteTaskRequest +from .types.cloudtasks import GetQueueRequest +from .types.cloudtasks import GetTaskRequest +from .types.cloudtasks import ListQueuesRequest +from .types.cloudtasks import ListQueuesResponse +from .types.cloudtasks import ListTasksRequest +from .types.cloudtasks import ListTasksResponse +from .types.cloudtasks import PauseQueueRequest +from .types.cloudtasks import PurgeQueueRequest +from .types.cloudtasks import ResumeQueueRequest +from .types.cloudtasks import RunTaskRequest +from .types.cloudtasks import UpdateQueueRequest +from .types.queue import Queue +from .types.queue import RateLimits +from .types.queue import RetryConfig +from .types.queue import StackdriverLoggingConfig +from .types.target import AppEngineHttpQueue +from .types.target import AppEngineHttpRequest +from .types.target import AppEngineRouting +from .types.target import HttpMethod +from .types.target import HttpRequest +from .types.target import OAuthToken +from .types.target import OidcToken +from .types.task import Attempt +from .types.task import Task __all__ = ( - "enums", - "types", + "AppEngineHttpQueue", + "AppEngineHttpRequest", + "AppEngineRouting", + "Attempt", + "CreateQueueRequest", + "CreateTaskRequest", + "DeleteQueueRequest", + "DeleteTaskRequest", + "GetQueueRequest", + "GetTaskRequest", + "HttpMethod", + "HttpRequest", + "ListQueuesRequest", + "ListQueuesResponse", + "ListTasksRequest", + "ListTasksResponse", + "OAuthToken", + "OidcToken", + "PauseQueueRequest", + "PurgeQueueRequest", + "Queue", + "RateLimits", + "ResumeQueueRequest", + "RetryConfig", + "RunTaskRequest", + "StackdriverLoggingConfig", + "Task", + "UpdateQueueRequest", "CloudTasksClient", ) diff --git a/google/cloud/tasks_v2beta3/gapic/__init__.py b/google/cloud/tasks_v2beta3/gapic/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py b/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py deleted file mode 100644 index 6b6c4e34..00000000 --- a/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py +++ /dev/null @@ -1,1700 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.cloud.tasks.v2beta3 CloudTasks API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.tasks_v2beta3.gapic import cloud_tasks_client_config -from google.cloud.tasks_v2beta3.gapic import enums -from google.cloud.tasks_v2beta3.gapic.transports import cloud_tasks_grpc_transport -from google.cloud.tasks_v2beta3.proto import cloudtasks_pb2 -from google.cloud.tasks_v2beta3.proto import cloudtasks_pb2_grpc -from google.cloud.tasks_v2beta3.proto import queue_pb2 -from google.cloud.tasks_v2beta3.proto import task_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import options_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-tasks",).version - - -class CloudTasksClient(object): - """ - Cloud Tasks allows developers to manage the execution of background - work in their applications. - """ - - SERVICE_ADDRESS = "cloudtasks.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.tasks.v2beta3.CloudTasks" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudTasksClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def location_path(cls, project, location): - """Return a fully-qualified location string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}", - project=project, - location=location, - ) - - @classmethod - def queue_path(cls, project, location, queue): - """Return a fully-qualified queue string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/queues/{queue}", - project=project, - location=location, - queue=queue, - ) - - @classmethod - def task_path(cls, project, location, queue, task): - """Return a fully-qualified task string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}", - project=project, - location=location, - queue=queue, - task=task, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.CloudTasksGrpcTransport, - Callable[[~.Credentials, type], ~.CloudTasksGrpcTransport]): A transport - instance, responsible for actually making the API calls. 
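For comparison with the handwritten v2beta3 client being removed here, a minimal usage sketch of the generated surface exported by the new `google/cloud/tasks_v2beta3/__init__.py` above; the project, location, queue, and handler URL are placeholders, and the flattened keyword-argument call style is assumed from the microgenerated clients rather than shown in this hunk:

from google.cloud import tasks_v2beta3

client = tasks_v2beta3.CloudTasksClient()

# Placeholder resource names (not part of this patch).
parent = "projects/my-project/locations/us-central1"
queue_name = parent + "/queues/my-queue"

# Create a queue; a tasks_v2beta3.CreateQueueRequest could be passed as
# `request=` instead of the flattened keyword arguments used here.
queue = client.create_queue(
    parent=parent,
    queue=tasks_v2beta3.Queue(name=queue_name),
)

# Enqueue an HTTP task against a placeholder handler URL.
task = client.create_task(
    parent=queue.name,
    task=tasks_v2beta3.Task(
        http_request=tasks_v2beta3.HttpRequest(
            http_method=tasks_v2beta3.HttpMethod.POST,
            url="https://example.com/handler",
        )
    ),
)
print(task.name)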
- The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = cloud_tasks_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=cloud_tasks_grpc_transport.CloudTasksGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = cloud_tasks_grpc_transport.CloudTasksGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. 
- # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def list_queues( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists queues. - - Queues are returned in lexicographical order. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> parent = client.location_path('[PROJECT]', '[LOCATION]') - >>> - >>> # Iterate over all results - >>> for element in client.list_queues(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_queues(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - filter_ (str): ``filter`` can be used to specify a subset of queues. Any ``Queue`` - field can be used as a filter and several operators as supported. For - example: ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as - described in `Stackdriver's Advanced Logs - Filters `__. - - Sample filter "state: PAUSED". - - Note that using filters might cause fewer queues than the requested - page_size to be returned. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.tasks_v2beta3.types.Queue` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
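        # (Note on the legacy pattern below: the wrapped callable is built
        #  lazily on first use and cached in `_inner_api_calls`; its default
        #  retry and timeout come from the "ListQueues" entry in
        #  cloud_tasks_client_config.config, and explicit `retry`/`timeout`
        #  arguments passed to this method override those defaults per call.)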
- if "list_queues" not in self._inner_api_calls: - self._inner_api_calls[ - "list_queues" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_queues, - default_retry=self._method_configs["ListQueues"].retry, - default_timeout=self._method_configs["ListQueues"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.ListQueuesRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_queues"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="queues", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a queue. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> response = client.get_queue(name) - - Args: - name (str): Required. The resource name of the queue. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "get_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_queue, - default_retry=self._method_configs["GetQueue"].retry, - default_timeout=self._method_configs["GetQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.GetQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_queue( - self, - parent, - queue, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a queue. 
- - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless - of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> parent = client.location_path('[PROJECT]', '[LOCATION]') - >>> - >>> # Initialize `queue`: - >>> queue = { - ... # The fully qualified path to the queue - ... 'name': client.queue_path('[PROJECT]', '[LOCATION]', '[NAME]'), - ... 'app_engine_http_queue': { - ... 'app_engine_routing_override': { - ... # The App Engine service that will receive the tasks. - ... 'service': 'default', - ... }, - ... }, - ... } - >>> - >>> response = client.create_queue(parent, queue) - - Args: - parent (str): Required. The location name in which the queue will be created. For - example: ``projects/PROJECT_ID/locations/LOCATION_ID`` - - The list of allowed locations can be obtained by calling Cloud Tasks' - implementation of ``ListLocations``. - queue (Union[dict, ~google.cloud.tasks_v2beta3.types.Queue]): Required. The queue to create. - - ``Queue's name`` cannot be the same as an existing queue. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta3.types.Queue` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "create_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_queue, - default_retry=self._method_configs["CreateQueue"].retry, - default_timeout=self._method_configs["CreateQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.CreateQueueRequest(parent=parent, queue=queue,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_queue( - self, - queue, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a queue. 
- - This method creates the queue if it does not exist and updates the queue - if it does exist. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless - of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> # Initialize `queue`: - >>> queue = { - ... # The fully qualified path to the queue - ... 'name': client.queue_path('[PROJECT]', '[LOCATION]', '[NAME]'), - ... 'app_engine_http_queue': { - ... 'app_engine_routing_override': { - ... # The App Engine service that will receive the tasks. - ... 'service': 'default', - ... }, - ... }, - ... } - >>> - >>> response = client.update_queue(queue) - - Args: - queue (Union[dict, ~google.cloud.tasks_v2beta3.types.Queue]): Required. The queue to create or update. - - The queue's ``name`` must be specified. - - Output only fields cannot be modified using UpdateQueue. Any value - specified for an output only field will be ignored. The queue's ``name`` - cannot be changed. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta3.types.Queue` - update_mask (Union[dict, ~google.cloud.tasks_v2beta3.types.FieldMask]): A mask used to specify which fields of the queue are being updated. - - If empty, then all fields will be updated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta3.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "update_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_queue, - default_retry=self._method_configs["UpdateQueue"].retry, - default_timeout=self._method_configs["UpdateQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.UpdateQueueRequest( - queue=queue, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("queue.name", queue.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be created - for 7 days. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> client.delete_queue(name) - - Args: - name (str): Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_queue, - default_retry=self._method_configs["DeleteQueue"].retry, - default_timeout=self._method_configs["DeleteQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.DeleteQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def purge_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Purges a queue by deleting all of its tasks. - - All tasks created before this method is called are permanently deleted. - - Purge operations can take up to one minute to take effect. Tasks - might be dispatched before the purge takes effect. A purge is irreversible. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> response = client.purge_queue(name) - - Args: - name (str): Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "purge_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "purge_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.purge_queue, - default_retry=self._method_configs["PurgeQueue"].retry, - default_timeout=self._method_configs["PurgeQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.PurgeQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["purge_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def pause_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Pauses the queue. 
- - If a queue is paused then the system will stop dispatching tasks until - the queue is resumed via ``ResumeQueue``. Tasks can still be added when - the queue is paused. A queue is paused if its ``state`` is ``PAUSED``. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> response = client.pause_queue(name) - - Args: - name (str): Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "pause_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "pause_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.pause_queue, - default_retry=self._method_configs["PauseQueue"].retry, - default_timeout=self._method_configs["PauseQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.PauseQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["pause_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def resume_queue( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Resume a queue. - - This method resumes a queue after it has been ``PAUSED`` or - ``DISABLED``. The state of a queue is stored in the queue's ``state``; - after calling this method it will be set to ``RUNNING``. - - WARNING: Resuming many high-QPS queues at the same time can lead to - target overloading. If you are resuming high-QPS queues, follow the - 500/50/5 pattern described in `Managing Cloud Tasks Scaling - Risks `__. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> response = client.resume_queue(name) - - Args: - name (str): Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "resume_queue" not in self._inner_api_calls: - self._inner_api_calls[ - "resume_queue" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.resume_queue, - default_retry=self._method_configs["ResumeQueue"].retry, - default_timeout=self._method_configs["ResumeQueue"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.ResumeQueueRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["resume_queue"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_iam_policy( - self, - resource, - options_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the access control policy for a ``Queue``. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the specified - resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> response = client.get_iam_policy(resource) - - Args: - resource (str): REQUIRED: The resource for which the policy is being requested. - See the operation documentation for the appropriate value for this field. - options_ (Union[dict, ~google.cloud.tasks_v2beta3.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to - ``GetIamPolicy``. This field is only used by Cloud IAM. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta3.types.GetPolicyOptions` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta3.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "get_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_iam_policy, - default_retry=self._method_configs["GetIamPolicy"].retry, - default_timeout=self._method_configs["GetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def set_iam_policy( - self, - resource, - policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Sets the access control policy for a ``Queue``. Replaces any - existing policy. - - Note: The Cloud Console does not check queue-level IAM permissions yet. - Project-level permissions are required to use the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the specified - resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `policy`: - >>> policy = {} - >>> - >>> response = client.set_iam_policy(resource, policy) - - Args: - resource (str): REQUIRED: The resource for which the policy is being specified. - See the operation documentation for the appropriate value for this field. - policy (Union[dict, ~google.cloud.tasks_v2beta3.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The - size of the policy is limited to a few 10s of KB. An empty policy is a - valid policy but certain Cloud Platform services (such as Projects) - might reject them. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta3.types.Policy` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta3.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "set_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "set_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.set_iam_policy, - default_retry=self._method_configs["SetIamPolicy"].retry, - default_timeout=self._method_configs["SetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["set_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def test_iam_permissions( - self, - resource, - permissions, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns permissions that a caller has on a ``Queue``. If the - resource does not exist, this will return an empty set of permissions, - not a ``NOT_FOUND`` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `permissions`: - >>> permissions = [] - >>> - >>> response = client.test_iam_permissions(resource, permissions) - - Args: - resource (str): REQUIRED: The resource for which the policy detail is being requested. - See the operation documentation for the appropriate value for this field. - permissions (list[str]): The set of permissions to check for the ``resource``. Permissions - with wildcards (such as '*' or 'storage.*') are not allowed. For more - information see `IAM - Overview `__. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta3.types.TestIamPermissionsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "test_iam_permissions" not in self._inner_api_calls: - self._inner_api_calls[ - "test_iam_permissions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.test_iam_permissions, - default_retry=self._method_configs["TestIamPermissions"].retry, - default_timeout=self._method_configs["TestIamPermissions"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["test_iam_permissions"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_tasks( - self, - parent, - response_view=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the tasks in a queue. - - By default, only the ``BASIC`` view is retrieved due to performance - considerations; ``response_view`` controls the subset of information - which is returned. - - The tasks may be returned in any order. The ordering may change at any - time. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> # Iterate over all results - >>> for element in client.list_tasks(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_tasks(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - response_view (~google.cloud.tasks_v2beta3.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.tasks_v2beta3.types.Task` instances. 
- You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_tasks" not in self._inner_api_calls: - self._inner_api_calls[ - "list_tasks" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_tasks, - default_retry=self._method_configs["ListTasks"].retry, - default_timeout=self._method_configs["ListTasks"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.ListTasksRequest( - parent=parent, response_view=response_view, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_tasks"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="tasks", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_task( - self, - name, - response_view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a task. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') - >>> - >>> response = client.get_task(name) - - Args: - name (str): Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (~google.cloud.tasks_v2beta3.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta3.types.Task` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_task" not in self._inner_api_calls: - self._inner_api_calls[ - "get_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_task, - default_retry=self._method_configs["GetTask"].retry, - default_timeout=self._method_configs["GetTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.GetTaskRequest(name=name, response_view=response_view,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_task( - self, - parent, - task, - response_view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask command. - - - The maximum task size is 100KB. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') - >>> - >>> # TODO: Initialize `task`: - >>> task = {} - >>> - >>> response = client.create_task(parent, task) - - Args: - parent (str): Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - The queue must already exist. - task (Union[dict, ~google.cloud.tasks_v2beta3.types.Task]): Required. The task to add. - - Task names have the following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. - The user can optionally specify a task ``name``. If a name is not - specified then the system will generate a random unique task id, which - will be set in the task returned in the ``response``. - - If ``schedule_time`` is not set or is in the past then Cloud Tasks will - set it to the current time. - - Task De-duplication: - - Explicitly specifying a task ID enables task de-duplication. If a task's - ID is identical to that of an existing task or a task that was deleted - or executed recently then the call will fail with ``ALREADY_EXISTS``. If - the task's queue was created using Cloud Tasks, then another task with - the same name can't be created for ~1hour after the original task was - deleted or executed. If the task's queue was created using queue.yaml or - queue.xml, then another task with the same name can't be created for - ~9days after the original task was deleted or executed. - - Because there is an extra lookup cost to identify duplicate task names, - these ``CreateTask`` calls have significantly increased latency. Using - hashed strings for the task id or for the prefix of the task id is - recommended. Choosing task ids that are sequential or have sequential - prefixes, for example using a timestamp, causes an increase in latency - and error rates in all task commands. The infrastructure relies on an - approximately uniform distribution of task ids to store and serve tasks - efficiently. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.tasks_v2beta3.types.Task` - response_view (~google.cloud.tasks_v2beta3.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. 
- - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta3.types.Task` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_task" not in self._inner_api_calls: - self._inner_api_calls[ - "create_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_task, - default_retry=self._method_configs["CreateTask"].retry, - default_timeout=self._method_configs["CreateTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.CreateTaskRequest( - parent=parent, task=task, response_view=response_view, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_task( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a task. - - A task can be deleted if it is scheduled or dispatched. A task - cannot be deleted if it has executed successfully or permanently - failed. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') - >>> - >>> client.delete_task(name) - - Args: - name (str): Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_task" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_task, - default_retry=self._method_configs["DeleteTask"].retry, - default_timeout=self._method_configs["DeleteTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.DeleteTaskRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def run_task( - self, - name, - response_view=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, even if - the task is already running, the queue has reached its ``RateLimits`` or - is ``PAUSED``. - - This command is meant to be used for manual debugging. For example, - ``RunTask`` can be used to retry a failed task after a fix has been made - or to manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is returned - contains the ``status`` after the task is dispatched but before the task - is received by its target. - - If Cloud Tasks receives a successful response from the task's target, - then the task will be deleted; otherwise the task's ``schedule_time`` - will be reset to the time that ``RunTask`` was called plus the retry - delay specified in the queue's ``RetryConfig``. - - ``RunTask`` returns ``NOT_FOUND`` when it is called on a task that has - already succeeded or permanently failed. - - Example: - >>> from google.cloud import tasks_v2beta3 - >>> - >>> client = tasks_v2beta3.CloudTasksClient() - >>> - >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') - >>> - >>> response = client.run_task(name) - - Args: - name (str): Required. The task name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (~google.cloud.tasks_v2beta3.enums.Task.View): The response_view specifies which subset of the ``Task`` will be - returned. - - By default response_view is ``BASIC``; not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `___ permission on the - ``Task`` resource. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - - Returns: - A :class:`~google.cloud.tasks_v2beta3.types.Task` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "run_task" not in self._inner_api_calls: - self._inner_api_calls[ - "run_task" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.run_task, - default_retry=self._method_configs["RunTask"].retry, - default_timeout=self._method_configs["RunTask"].timeout, - client_info=self._client_info, - ) - - request = cloudtasks_pb2.RunTaskRequest(name=name, response_view=response_view,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["run_task"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client_config.py b/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client_config.py deleted file mode 100644 index 0db084a8..00000000 --- a/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client_config.py +++ /dev/null @@ -1,122 +0,0 @@ -config = { - "interfaces": { - "google.cloud.tasks.v2beta3.CloudTasks": { - "retry_codes": { - "retry_policy_1_codes": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "no_retry_codes": [], - "no_retry_1_codes": [], - }, - "retry_params": { - "retry_policy_1_params": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 10000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 10000, - "total_timeout_millis": 10000, - }, - "no_retry_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 0, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 0, - "total_timeout_millis": 0, - }, - "no_retry_1_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 10000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 10000, - "total_timeout_millis": 10000, - }, - }, - "methods": { - "ListQueues": { - "timeout_millis": 15000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "GetQueue": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "CreateQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "UpdateQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "DeleteQueue": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "PurgeQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "PauseQueue": { - "timeout_millis": 10000, - 
"retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "ResumeQueue": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "GetIamPolicy": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "SetIamPolicy": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "TestIamPermissions": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListTasks": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "GetTask": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "CreateTask": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "DeleteTask": { - "timeout_millis": 10000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "RunTask": { - "timeout_millis": 10000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - }, - } - } -} diff --git a/google/cloud/tasks_v2beta3/gapic/enums.py b/google/cloud/tasks_v2beta3/gapic/enums.py deleted file mode 100644 index 1c243db6..00000000 --- a/google/cloud/tasks_v2beta3/gapic/enums.py +++ /dev/null @@ -1,127 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class HttpMethod(enum.IntEnum): - """ - The HTTP method used to execute the task. - - Attributes: - HTTP_METHOD_UNSPECIFIED (int): HTTP method unspecified - POST (int): HTTP POST - GET (int): HTTP GET - HEAD (int): HTTP HEAD - PUT (int): HTTP PUT - DELETE (int): HTTP DELETE - PATCH (int): HTTP PATCH - OPTIONS (int): HTTP OPTIONS - """ - - HTTP_METHOD_UNSPECIFIED = 0 - POST = 1 - GET = 2 - HEAD = 3 - PUT = 4 - DELETE = 5 - PATCH = 6 - OPTIONS = 7 - - -class Queue(object): - class State(enum.IntEnum): - """ - State of the queue. - - Attributes: - STATE_UNSPECIFIED (int): Unspecified state. - RUNNING (int): The queue is running. Tasks can be dispatched. - - If the queue was created using Cloud Tasks and the queue has had no - activity (method calls or task dispatches) for 30 days, the queue may - take a few minutes to re-activate. Some method calls may return - ``NOT_FOUND`` and tasks may not be dispatched for a few minutes until - the queue has been re-activated. - PAUSED (int): Tasks are paused by the user. If the queue is paused then Cloud - Tasks will stop delivering tasks from it, but more tasks can - still be added to it by the user. - DISABLED (int): The queue is disabled. 
- - A queue becomes ``DISABLED`` when - `queue.yaml `__ - or - `queue.xml `__ - is uploaded which does not contain the queue. You cannot directly - disable a queue. - - When a queue is disabled, tasks can still be added to a queue but the - tasks are not dispatched. - - To permanently delete this queue and all of its tasks, call - ``DeleteQueue``. - """ - - STATE_UNSPECIFIED = 0 - RUNNING = 1 - PAUSED = 2 - DISABLED = 3 - - class Type(enum.IntEnum): - """ - The type of the queue. - - Attributes: - TYPE_UNSPECIFIED (int): Default value. - PULL (int): A pull queue. - PUSH (int): A push queue. - """ - - TYPE_UNSPECIFIED = 0 - PULL = 1 - PUSH = 2 - - -class Task(object): - class View(enum.IntEnum): - """ - The view specifies a subset of ``Task`` data. - - When a task is returned in a response, not all information is retrieved - by default because some data, such as payloads, might be desirable to - return only when needed because of its large size or because of the - sensitivity of data that it contains. - - Attributes: - VIEW_UNSPECIFIED (int): Unspecified. Defaults to BASIC. - BASIC (int): The basic view omits fields which can be large or can contain - sensitive data. - - This view does not include the ``body in AppEngineHttpRequest``. Bodies - are desirable to return only when needed, because they can be large and - because of the sensitivity of the data that you choose to store in it. - FULL (int): All information is returned. - - Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` - `Google IAM `__ permission on the - ``Queue`` resource. - """ - - VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 diff --git a/google/cloud/tasks_v2beta3/gapic/transports/__init__.py b/google/cloud/tasks_v2beta3/gapic/transports/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/tasks_v2beta3/gapic/transports/cloud_tasks_grpc_transport.py b/google/cloud/tasks_v2beta3/gapic/transports/cloud_tasks_grpc_transport.py deleted file mode 100644 index 7d7d4f99..00000000 --- a/google/cloud/tasks_v2beta3/gapic/transports/cloud_tasks_grpc_transport.py +++ /dev/null @@ -1,428 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.tasks_v2beta3.proto import cloudtasks_pb2_grpc - - -class CloudTasksGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.tasks.v2beta3 CloudTasks API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="cloudtasks.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. 
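With enums.py gone, the same enums live on the proto-plus types and, assuming the new package keeps re-exporting them at the root as its __init__ does elsewhere in this change, can be referenced without the old enums module. A short sketch:

    from google.cloud import tasks_v2beta3

    # Formerly tasks_v2beta3.enums.HttpMethod / Queue.State / Task.View.
    method = tasks_v2beta3.HttpMethod.POST
    paused = tasks_v2beta3.Queue.State.PAUSED
    view = tasks_v2beta3.Task.View.BASIC
    print(method, paused, view)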
This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "cloud_tasks_stub": cloudtasks_pb2_grpc.CloudTasksStub(channel), - } - - @classmethod - def create_channel( - cls, address="cloudtasks.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def list_queues(self): - """Return the gRPC stub for :meth:`CloudTasksClient.list_queues`. - - Lists queues. - - Queues are returned in lexicographical order. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].ListQueues - - @property - def get_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.get_queue`. - - Gets a queue. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].GetQueue - - @property - def create_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.create_queue`. - - Creates a queue. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless - of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
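Custom credentials and channels remain possible after the migration, but through the generated transport classes rather than this hand-written one. A sketch assuming the CloudTasksGrpcTransport added under services/cloud_tasks/transports in this change; application-default credentials are used for illustration:

    import google.auth
    from google.cloud import tasks_v2beta3
    from google.cloud.tasks_v2beta3.services.cloud_tasks.transports import (
        CloudTasksGrpcTransport,
    )

    credentials, _ = google.auth.default()

    # Build the transport explicitly (the string form transport="grpc" also works)
    # and hand it to the client; channel creation and scopes are handled inside.
    transport = CloudTasksGrpcTransport(credentials=credentials)
    client = tasks_v2beta3.CloudTasksClient(transport=transport)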
- """ - return self._stubs["cloud_tasks_stub"].CreateQueue - - @property - def update_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.update_queue`. - - Updates a queue. - - This method creates the queue if it does not exist and updates the queue - if it does exist. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless - of whether it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].UpdateQueue - - @property - def delete_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.delete_queue`. - - Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be created - for 7 days. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your - queues. Read `Overview of Queue Management and - queue.yaml `__ before - using this method. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].DeleteQueue - - @property - def purge_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.purge_queue`. - - Purges a queue by deleting all of its tasks. - - All tasks created before this method is called are permanently deleted. - - Purge operations can take up to one minute to take effect. Tasks - might be dispatched before the purge takes effect. A purge is irreversible. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].PurgeQueue - - @property - def pause_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.pause_queue`. - - Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks until - the queue is resumed via ``ResumeQueue``. Tasks can still be added when - the queue is paused. A queue is paused if its ``state`` is ``PAUSED``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].PauseQueue - - @property - def resume_queue(self): - """Return the gRPC stub for :meth:`CloudTasksClient.resume_queue`. - - Resume a queue. - - This method resumes a queue after it has been ``PAUSED`` or - ``DISABLED``. The state of a queue is stored in the queue's ``state``; - after calling this method it will be set to ``RUNNING``. - - WARNING: Resuming many high-QPS queues at the same time can lead to - target overloading. If you are resuming high-QPS queues, follow the - 500/50/5 pattern described in `Managing Cloud Tasks Scaling - Risks `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["cloud_tasks_stub"].ResumeQueue - - @property - def get_iam_policy(self): - """Return the gRPC stub for :meth:`CloudTasksClient.get_iam_policy`. - - Gets the access control policy for a ``Queue``. Returns an empty - policy if the resource exists and does not have a policy set. - - Authorization requires the following `Google - IAM `__ permission on the specified - resource parent: - - - ``cloudtasks.queues.getIamPolicy`` - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].GetIamPolicy - - @property - def set_iam_policy(self): - """Return the gRPC stub for :meth:`CloudTasksClient.set_iam_policy`. - - Sets the access control policy for a ``Queue``. Replaces any - existing policy. - - Note: The Cloud Console does not check queue-level IAM permissions yet. - Project-level permissions are required to use the Cloud Console. - - Authorization requires the following `Google - IAM `__ permission on the specified - resource parent: - - - ``cloudtasks.queues.setIamPolicy`` - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].SetIamPolicy - - @property - def test_iam_permissions(self): - """Return the gRPC stub for :meth:`CloudTasksClient.test_iam_permissions`. - - Returns permissions that a caller has on a ``Queue``. If the - resource does not exist, this will return an empty set of permissions, - not a ``NOT_FOUND`` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].TestIamPermissions - - @property - def list_tasks(self): - """Return the gRPC stub for :meth:`CloudTasksClient.list_tasks`. - - Lists the tasks in a queue. - - By default, only the ``BASIC`` view is retrieved due to performance - considerations; ``response_view`` controls the subset of information - which is returned. - - The tasks may be returned in any order. The ordering may change at any - time. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].ListTasks - - @property - def get_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.get_task`. - - Gets a task. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].GetTask - - @property - def create_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.create_task`. - - Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask command. - - - The maximum task size is 100KB. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].CreateTask - - @property - def delete_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.delete_task`. - - Deletes a task. 
- - A task can be deleted if it is scheduled or dispatched. A task - cannot be deleted if it has executed successfully or permanently - failed. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].DeleteTask - - @property - def run_task(self): - """Return the gRPC stub for :meth:`CloudTasksClient.run_task`. - - Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, even if - the task is already running, the queue has reached its ``RateLimits`` or - is ``PAUSED``. - - This command is meant to be used for manual debugging. For example, - ``RunTask`` can be used to retry a failed task after a fix has been made - or to manually force a task to be dispatched now. - - The dispatched task is returned. That is, the task that is returned - contains the ``status`` after the task is dispatched but before the task - is received by its target. - - If Cloud Tasks receives a successful response from the task's target, - then the task will be deleted; otherwise the task's ``schedule_time`` - will be reset to the time that ``RunTask`` was called plus the retry - delay specified in the queue's ``RetryConfig``. - - ``RunTask`` returns ``NOT_FOUND`` when it is called on a task that has - already succeeded or permanently failed. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["cloud_tasks_stub"].RunTask diff --git a/google/cloud/tasks_v2beta3/proto/__init__.py b/google/cloud/tasks_v2beta3/proto/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2.py b/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2.py deleted file mode 100644 index 80db9c78..00000000 --- a/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2.py +++ /dev/null @@ -1,1612 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
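The generated cloudtasks_pb2 descriptors that start here are superseded by proto-plus message classes under google.cloud.tasks_v2beta3.types, which the package re-exports; requests can be built as message objects or plain dicts. A sketch of creating an HTTP-target task under that assumption (URL and resource names are placeholders):

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1/queues/my-queue"

    task = tasks_v2beta3.Task(
        http_request=tasks_v2beta3.HttpRequest(
            url="https://example.com/handler",
            http_method=tasks_v2beta3.HttpMethod.POST,
            body=b"payload",
        )
    )
    response = client.create_task(parent=parent, task=task)
    print("Created task:", response.name)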
-# source: google/cloud/tasks_v2beta3/proto/cloudtasks.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.tasks_v2beta3.proto import ( - queue_pb2 as google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2, -) -from google.cloud.tasks_v2beta3.proto import ( - task_pb2 as google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2, -) -from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 -from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/tasks_v2beta3/proto/cloudtasks.proto", - package="google.cloud.tasks.v2beta3", - syntax="proto3", - serialized_options=b"\n\036com.google.cloud.tasks.v2beta3B\017CloudTasksProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasks\242\002\005TASKS", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n1google/cloud/tasks_v2beta3/proto/cloudtasks.proto\x12\x1agoogle.cloud.tasks.v2beta3\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/tasks_v2beta3/proto/queue.proto\x1a+google/cloud/tasks_v2beta3/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x83\x01\n\x11ListQueuesRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"`\n\x12ListQueuesResponse\x12\x31\n\x06queues\x18\x01 \x03(\x0b\x32!.google.cloud.tasks.v2beta3.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x84\x01\n\x12\x43reateQueueRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x35\n\x05queue\x18\x02 \x01(\x0b\x32!.google.cloud.tasks.v2beta3.QueueB\x03\xe0\x41\x02"|\n\x12UpdateQueueRequest\x12\x35\n\x05queue\x18\x01 \x01(\x0b\x32!.google.cloud.tasks.v2beta3.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 
\x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xaf\x01\n\x10ListTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"]\n\x11ListTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta3.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View"\xbe\x01\n\x11\x43reateTaskRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12\x33\n\x04task\x18\x02 \x01(\x0b\x32 .google.cloud.tasks.v2beta3.TaskB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\x84\x01\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View2\xa5\x16\n\nCloudTasks\x12\xad\x01\n\nListQueues\x12-.google.cloud.tasks.v2beta3.ListQueuesRequest\x1a..google.cloud.tasks.v2beta3.ListQueuesResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v2beta3/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x9a\x01\n\x08GetQueue\x12+.google.cloud.tasks.v2beta3.GetQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue">\x82\xd3\xe4\x93\x02\x31\x12//v2beta3/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xaf\x01\n\x0b\x43reateQueue\x12..google.cloud.tasks.v2beta3.CreateQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"M\x82\xd3\xe4\x93\x02\x38"//v2beta3/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xba\x01\n\x0bUpdateQueue\x12..google.cloud.tasks.v2beta3.UpdateQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"X\x82\xd3\xe4\x93\x02>25/v2beta3/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x95\x01\n\x0b\x44\x65leteQueue\x12..google.cloud.tasks.v2beta3.DeleteQueueRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v2beta3/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa7\x01\n\nPurgeQueue\x12-.google.cloud.tasks.v2beta3.PurgeQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta3/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\xa7\x01\n\nPauseQueue\x12-.google.cloud.tasks.v2beta3.PauseQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta3/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\xaa\x01\n\x0bResumeQueue\x12..google.cloud.tasks.v2beta3.ResumeQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"H\x82\xd3\xe4\x93\x02;"6/v2beta3/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\xa1\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"V\x82\xd3\xe4\x93\x02\x45"@/v2beta3/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\xda\x41\x08resource\x12\xa8\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"]\x82\xd3\xe4\x93\x02\x45"@/v2beta3/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,p
olicy\x12\xd3\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"h\x82\xd3\xe4\x93\x02K"F/v2beta3/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xb2\x01\n\tListTasks\x12,.google.cloud.tasks.v2beta3.ListTasksRequest\x1a-.google.cloud.tasks.v2beta3.ListTasksResponse"H\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x9f\x01\n\x07GetTask\x12*.google.cloud.tasks.v2beta3.GetTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"F\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xaf\x01\n\nCreateTask\x12-.google.cloud.tasks.v2beta3.CreateTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"P\x82\xd3\xe4\x93\x02<"7/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x9b\x01\n\nDeleteTask\x12-.google.cloud.tasks.v2beta3.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02\x39*7/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xa6\x01\n\x07RunTask\x12*.google.cloud.tasks.v2beta3.RunTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"M\x82\xd3\xe4\x93\x02@";/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB|\n\x1e\x63om.google.cloud.tasks.v2beta3B\x0f\x43loudTasksProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasks\xa2\x02\x05TASKSb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.DESCRIPTOR, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2.DESCRIPTOR, - google_dot_iam_dot_v1_dot_iam__policy__pb2.DESCRIPTOR, - google_dot_iam_dot_v1_dot_policy__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - ], -) - - -_LISTQUEUESREQUEST = _descriptor.Descriptor( - name="ListQueuesRequest", - full_name="google.cloud.tasks.v2beta3.ListQueuesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.tasks.v2beta3.ListQueuesRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.tasks.v2beta3.ListQueuesRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.tasks.v2beta3.ListQueuesRequest.page_size", - index=2, - number=3, - type=5, - 
cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.tasks.v2beta3.ListQueuesRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=411, - serialized_end=542, -) - - -_LISTQUEUESRESPONSE = _descriptor.Descriptor( - name="ListQueuesResponse", - full_name="google.cloud.tasks.v2beta3.ListQueuesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="queues", - full_name="google.cloud.tasks.v2beta3.ListQueuesResponse.queues", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.tasks.v2beta3.ListQueuesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=544, - serialized_end=640, -) - - -_GETQUEUEREQUEST = _descriptor.Descriptor( - name="GetQueueRequest", - full_name="google.cloud.tasks.v2beta3.GetQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta3.GetQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=642, - serialized_end=714, -) - - -_CREATEQUEUEREQUEST = _descriptor.Descriptor( - name="CreateQueueRequest", - full_name="google.cloud.tasks.v2beta3.CreateQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - 
name="parent", - full_name="google.cloud.tasks.v2beta3.CreateQueueRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="queue", - full_name="google.cloud.tasks.v2beta3.CreateQueueRequest.queue", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=717, - serialized_end=849, -) - - -_UPDATEQUEUEREQUEST = _descriptor.Descriptor( - name="UpdateQueueRequest", - full_name="google.cloud.tasks.v2beta3.UpdateQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="queue", - full_name="google.cloud.tasks.v2beta3.UpdateQueueRequest.queue", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.tasks.v2beta3.UpdateQueueRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=851, - serialized_end=975, -) - - -_DELETEQUEUEREQUEST = _descriptor.Descriptor( - name="DeleteQueueRequest", - full_name="google.cloud.tasks.v2beta3.DeleteQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta3.DeleteQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=977, - serialized_end=1052, -) - - -_PURGEQUEUEREQUEST = _descriptor.Descriptor( - name="PurgeQueueRequest", - 
full_name="google.cloud.tasks.v2beta3.PurgeQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta3.PurgeQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1054, - serialized_end=1128, -) - - -_PAUSEQUEUEREQUEST = _descriptor.Descriptor( - name="PauseQueueRequest", - full_name="google.cloud.tasks.v2beta3.PauseQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta3.PauseQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1130, - serialized_end=1204, -) - - -_RESUMEQUEUEREQUEST = _descriptor.Descriptor( - name="ResumeQueueRequest", - full_name="google.cloud.tasks.v2beta3.ResumeQueueRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta3.ResumeQueueRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1206, - serialized_end=1281, -) - - -_LISTTASKSREQUEST = _descriptor.Descriptor( - name="ListTasksRequest", - full_name="google.cloud.tasks.v2beta3.ListTasksRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.tasks.v2beta3.ListTasksRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \022\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), 
- _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2beta3.ListTasksRequest.response_view", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.tasks.v2beta3.ListTasksRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.tasks.v2beta3.ListTasksRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1284, - serialized_end=1459, -) - - -_LISTTASKSRESPONSE = _descriptor.Descriptor( - name="ListTasksResponse", - full_name="google.cloud.tasks.v2beta3.ListTasksResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="tasks", - full_name="google.cloud.tasks.v2beta3.ListTasksResponse.tasks", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.tasks.v2beta3.ListTasksResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1461, - serialized_end=1554, -) - - -_GETTASKREQUEST = _descriptor.Descriptor( - name="GetTaskRequest", - full_name="google.cloud.tasks.v2beta3.GetTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta3.GetTaskRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A 
\n\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2beta3.GetTaskRequest.response_view", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1557, - serialized_end=1689, -) - - -_CREATETASKREQUEST = _descriptor.Descriptor( - name="CreateTaskRequest", - full_name="google.cloud.tasks.v2beta3.CreateTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.tasks.v2beta3.CreateTaskRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \022\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="task", - full_name="google.cloud.tasks.v2beta3.CreateTaskRequest.task", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2beta3.CreateTaskRequest.response_view", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1692, - serialized_end=1882, -) - - -_DELETETASKREQUEST = _descriptor.Descriptor( - name="DeleteTaskRequest", - full_name="google.cloud.tasks.v2beta3.DeleteTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta3.DeleteTaskRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1884, - 
serialized_end=1957, -) - - -_RUNTASKREQUEST = _descriptor.Descriptor( - name="RunTaskRequest", - full_name="google.cloud.tasks.v2beta3.RunTaskRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta3.RunTaskRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036cloudtasks.googleapis.com/Task", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_view", - full_name="google.cloud.tasks.v2beta3.RunTaskRequest.response_view", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1960, - serialized_end=2092, -) - -_LISTQUEUESRESPONSE.fields_by_name[ - "queues" -].message_type = google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2._QUEUE -_CREATEQUEUEREQUEST.fields_by_name[ - "queue" -].message_type = google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2._QUEUE -_UPDATEQUEUEREQUEST.fields_by_name[ - "queue" -].message_type = google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2._QUEUE -_UPDATEQUEUEREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTTASKSREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2._TASK_VIEW -_LISTTASKSRESPONSE.fields_by_name[ - "tasks" -].message_type = google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2._TASK -_GETTASKREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2._TASK_VIEW -_CREATETASKREQUEST.fields_by_name[ - "task" -].message_type = google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2._TASK -_CREATETASKREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2._TASK_VIEW -_RUNTASKREQUEST.fields_by_name[ - "response_view" -].enum_type = google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2._TASK_VIEW -DESCRIPTOR.message_types_by_name["ListQueuesRequest"] = _LISTQUEUESREQUEST -DESCRIPTOR.message_types_by_name["ListQueuesResponse"] = _LISTQUEUESRESPONSE -DESCRIPTOR.message_types_by_name["GetQueueRequest"] = _GETQUEUEREQUEST -DESCRIPTOR.message_types_by_name["CreateQueueRequest"] = _CREATEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["UpdateQueueRequest"] = _UPDATEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["DeleteQueueRequest"] = _DELETEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["PurgeQueueRequest"] = _PURGEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["PauseQueueRequest"] = _PAUSEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["ResumeQueueRequest"] = _RESUMEQUEUEREQUEST -DESCRIPTOR.message_types_by_name["ListTasksRequest"] = _LISTTASKSREQUEST 
-DESCRIPTOR.message_types_by_name["ListTasksResponse"] = _LISTTASKSRESPONSE -DESCRIPTOR.message_types_by_name["GetTaskRequest"] = _GETTASKREQUEST -DESCRIPTOR.message_types_by_name["CreateTaskRequest"] = _CREATETASKREQUEST -DESCRIPTOR.message_types_by_name["DeleteTaskRequest"] = _DELETETASKREQUEST -DESCRIPTOR.message_types_by_name["RunTaskRequest"] = _RUNTASKREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ListQueuesRequest = _reflection.GeneratedProtocolMessageType( - "ListQueuesRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTQUEUESREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for - [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. - - Attributes: - parent: - Required. The location name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` - filter: - \ ``filter`` can be used to specify a subset of queues. Any - [Queue][google.cloud.tasks.v2beta3.Queue] field can be used as - a filter and several operators as supported. For example: - ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as - described in `Stackdriver’s Advanced Logs Filters `_. Sample - filter “state: PAUSED”. Note that using filters might cause - fewer queues than the requested page_size to be returned. - page_size: - Requested page size. The maximum page size is 9800. If - unspecified, the page size will be the maximum. Fewer queues - than requested might be returned, even if more queues exist; - use the [next_page_token][google.cloud.tasks.v2beta3.ListQueue - sResponse.next_page_token] in the response to determine if - more queues exist. - page_token: - A token identifying the page of results to return. To request - the first page results, page_token must be empty. To request - the next page of results, page_token must be the value of [nex - t_page_token][google.cloud.tasks.v2beta3.ListQueuesResponse.ne - xt_page_token] returned from the previous call to - [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues] - method. It is an error to switch the value of the - [filter][google.cloud.tasks.v2beta3.ListQueuesRequest.filter] - while iterating through pages. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.ListQueuesRequest) - }, -) -_sym_db.RegisterMessage(ListQueuesRequest) - -ListQueuesResponse = _reflection.GeneratedProtocolMessageType( - "ListQueuesResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTQUEUESRESPONSE, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Response message for - [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. - - Attributes: - queues: - The list of queues. - next_page_token: - A token to retrieve next page of results. To return the next - page of results, call - [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues] - with this value as the [page_token][google.cloud.tasks.v2beta3 - .ListQueuesRequest.page_token]. If the next_page_token is - empty, there are no more results. The page token is valid for - only 2 hours. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.ListQueuesResponse) - }, -) -_sym_db.RegisterMessage(ListQueuesResponse) - -GetQueueRequest = _reflection.GeneratedProtocolMessageType( - "GetQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for - [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. 
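The filter and paging fields documented for ListQueuesRequest carry over to the proto-plus request unchanged; with the generated client the pager consumes page_token internally. A sketch using a dict request, with a placeholder parent and the sample filter from the docstring above:

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()

    pager = client.list_queues(
        request={
            "parent": "projects/my-project/locations/us-central1",
            "filter": "state: PAUSED",  # sample filter from the docstring above
            "page_size": 50,
        }
    )
    for queue in pager:
        print(queue.name, queue.state)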
- - Attributes: - name: - Required. The resource name of the queue. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.GetQueueRequest) - }, -) -_sym_db.RegisterMessage(GetQueueRequest) - -CreateQueueRequest = _reflection.GeneratedProtocolMessageType( - "CreateQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for - [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. - - Attributes: - parent: - Required. The location name in which the queue will be - created. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID`` The list of - allowed locations can be obtained by calling Cloud Tasks’ - implementation of [ListLocations][google.cloud.location.Locati - ons.ListLocations]. - queue: - Required. The queue to create. [Queue’s - name][google.cloud.tasks.v2beta3.Queue.name] cannot be the - same as an existing queue. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.CreateQueueRequest) - }, -) -_sym_db.RegisterMessage(CreateQueueRequest) - -UpdateQueueRequest = _reflection.GeneratedProtocolMessageType( - "UpdateQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for - [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. - - Attributes: - queue: - Required. The queue to create or update. The queue’s - [name][google.cloud.tasks.v2beta3.Queue.name] must be - specified. Output only fields cannot be modified using - UpdateQueue. Any value specified for an output only field will - be ignored. The queue’s - [name][google.cloud.tasks.v2beta3.Queue.name] cannot be - changed. - update_mask: - A mask used to specify which fields of the queue are being - updated. If empty, then all fields will be updated. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.UpdateQueueRequest) - }, -) -_sym_db.RegisterMessage(UpdateQueueRequest) - -DeleteQueueRequest = _reflection.GeneratedProtocolMessageType( - "DeleteQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for - [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. - - Attributes: - name: - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.DeleteQueueRequest) - }, -) -_sym_db.RegisterMessage(DeleteQueueRequest) - -PurgeQueueRequest = _reflection.GeneratedProtocolMessageType( - "PurgeQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _PURGEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for - [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. - - Attributes: - name: - Required. The queue name. 
For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.PurgeQueueRequest) - }, -) -_sym_db.RegisterMessage(PurgeQueueRequest) - -PauseQueueRequest = _reflection.GeneratedProtocolMessageType( - "PauseQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _PAUSEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for - [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. - - Attributes: - name: - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.PauseQueueRequest) - }, -) -_sym_db.RegisterMessage(PauseQueueRequest) - -ResumeQueueRequest = _reflection.GeneratedProtocolMessageType( - "ResumeQueueRequest", - (_message.Message,), - { - "DESCRIPTOR": _RESUMEQUEUEREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for - [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. - - Attributes: - name: - Required. The queue name. For example: - ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.ResumeQueueRequest) - }, -) -_sym_db.RegisterMessage(ResumeQueueRequest) - -ListTasksRequest = _reflection.GeneratedProtocolMessageType( - "ListTasksRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTASKSREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for listing tasks using - [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. - - Attributes: - parent: - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta3.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2beta3.Task] resource. - page_size: - Maximum page size. Fewer tasks than requested might be - returned, even if more tasks exist; use [next_page_token][goog - le.cloud.tasks.v2beta3.ListTasksResponse.next_page_token] in - the response to determine if more tasks exist. The maximum - page size is 1000. If unspecified, the page size will be the - maximum. - page_token: - A token identifying the page of results to return. To request - the first page results, page_token must be empty. To request - the next page of results, page_token must be the value of [nex - t_page_token][google.cloud.tasks.v2beta3.ListTasksResponse.nex - t_page_token] returned from the previous call to - [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] - method. The page token is valid for only 2 hours. 
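The response_view behaviour documented above can be exercised on the new client roughly as follows (FULL requires the cloudtasks.tasks.fullView permission; the queue name is a placeholder):

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1/queues/my-queue"

    # Request the FULL view to include payloads; BASIC is the default.
    request = {"parent": parent, "response_view": tasks_v2beta3.Task.View.FULL}
    for task in client.list_tasks(request=request):
        print(task.name)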
- """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.ListTasksRequest) - }, -) -_sym_db.RegisterMessage(ListTasksRequest) - -ListTasksResponse = _reflection.GeneratedProtocolMessageType( - "ListTasksResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTASKSRESPONSE, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Response message for listing tasks using - [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. - - Attributes: - tasks: - The list of tasks. - next_page_token: - A token to retrieve next page of results. To return the next - page of results, call - [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] - with this value as the [page_token][google.cloud.tasks.v2beta3 - .ListTasksRequest.page_token]. If the next_page_token is - empty, there are no more results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.ListTasksResponse) - }, -) -_sym_db.RegisterMessage(ListTasksResponse) - -GetTaskRequest = _reflection.GeneratedProtocolMessageType( - "GetTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETTASKREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for getting a task using - [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. - - Attributes: - name: - Required. The task name. For example: ``projects/PROJECT_ID/lo - cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta3.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2beta3.Task] resource. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.GetTaskRequest) - }, -) -_sym_db.RegisterMessage(GetTaskRequest) - -CreateTaskRequest = _reflection.GeneratedProtocolMessageType( - "CreateTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATETASKREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for - [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - - Attributes: - parent: - Required. The queue name. For example: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - The queue must already exist. - task: - Required. The task to add. Task names have the following - format: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUE - UE_ID/tasks/TASK_ID``. The user can optionally specify a task - [name][google.cloud.tasks.v2beta3.Task.name]. If a name is not - specified then the system will generate a random unique task - id, which will be set in the task returned in the - [response][google.cloud.tasks.v2beta3.Task.name]. If - [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] - is not set or is in the past then Cloud Tasks will set it to - the current time. Task De-duplication: Explicitly specifying - a task ID enables task de-duplication. 
If a task’s ID is - identical to that of an existing task or a task that was - deleted or executed recently then the call will fail with - [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the - task’s queue was created using Cloud Tasks, then another task - with the same name can’t be created for ~1hour after the - original task was deleted or executed. If the task’s queue was - created using queue.yaml or queue.xml, then another task with - the same name can’t be created for ~9days after the original - task was deleted or executed. Because there is an extra - lookup cost to identify duplicate task names, these - [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] - calls have significantly increased latency. Using hashed - strings for the task id or for the prefix of the task id is - recommended. Choosing task ids that are sequential or have - sequential prefixes, for example using a timestamp, causes an - increase in latency and error rates in all task commands. The - infrastructure relies on an approximately uniform distribution - of task ids to store and serve tasks efficiently. - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta3.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. Authorization for - [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2beta3.Task] resource. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.CreateTaskRequest) - }, -) -_sym_db.RegisterMessage(CreateTaskRequest) - -DeleteTaskRequest = _reflection.GeneratedProtocolMessageType( - "DeleteTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETETASKREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for deleting a task using - [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. - - Attributes: - name: - Required. The task name. For example: ``projects/PROJECT_ID/lo - cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.DeleteTaskRequest) - }, -) -_sym_db.RegisterMessage(DeleteTaskRequest) - -RunTaskRequest = _reflection.GeneratedProtocolMessageType( - "RunTaskRequest", - (_message.Message,), - { - "DESCRIPTOR": _RUNTASKREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.cloudtasks_pb2", - "__doc__": """Request message for forcing a task to run now using - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. - - Attributes: - name: - Required. The task name. For example: ``projects/PROJECT_ID/lo - cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view: - The response_view specifies which subset of the - [Task][google.cloud.tasks.v2beta3.Task] will be returned. By - default response_view is - [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all - information is retrieved by default because some data, such as - payloads, might be desirable to return only when needed - because of its large size or because of the sensitivity of - data that it contains. 
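The de-duplication behaviour explained above depends on the caller choosing the task ID; a sketch using a hashed ID on the replacement client (all resource names, the handler URL, and the payload are placeholders):

    import hashlib
    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1/queues/my-queue"
    payload = b"work-item-42"

    # A hashed ID keeps task names roughly uniformly distributed while still
    # de-duplicating retries of the same work item.
    task_id = hashlib.sha256(payload).hexdigest()
    task = tasks_v2beta3.Task(
        name=f"{parent}/tasks/{task_id}",
        http_request=tasks_v2beta3.HttpRequest(
            http_method=tasks_v2beta3.HttpMethod.POST,
            url="https://example.com/handler",
            body=payload,
        ),
    )
    created = client.create_task(parent=parent, task=task)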
Authorization for - [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires - ``cloudtasks.tasks.fullView`` `Google IAM - `_ permission on the - [Task][google.cloud.tasks.v2beta3.Task] resource. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.RunTaskRequest) - }, -) -_sym_db.RegisterMessage(RunTaskRequest) - - -DESCRIPTOR._options = None -_LISTQUEUESREQUEST.fields_by_name["parent"]._options = None -_GETQUEUEREQUEST.fields_by_name["name"]._options = None -_CREATEQUEUEREQUEST.fields_by_name["parent"]._options = None -_CREATEQUEUEREQUEST.fields_by_name["queue"]._options = None -_UPDATEQUEUEREQUEST.fields_by_name["queue"]._options = None -_DELETEQUEUEREQUEST.fields_by_name["name"]._options = None -_PURGEQUEUEREQUEST.fields_by_name["name"]._options = None -_PAUSEQUEUEREQUEST.fields_by_name["name"]._options = None -_RESUMEQUEUEREQUEST.fields_by_name["name"]._options = None -_LISTTASKSREQUEST.fields_by_name["parent"]._options = None -_GETTASKREQUEST.fields_by_name["name"]._options = None -_CREATETASKREQUEST.fields_by_name["parent"]._options = None -_CREATETASKREQUEST.fields_by_name["task"]._options = None -_DELETETASKREQUEST.fields_by_name["name"]._options = None -_RUNTASKREQUEST.fields_by_name["name"]._options = None - -_CLOUDTASKS = _descriptor.ServiceDescriptor( - name="CloudTasks", - full_name="google.cloud.tasks.v2beta3.CloudTasks", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A\031cloudtasks.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform", - create_key=_descriptor._internal_create_key, - serialized_start=2095, - serialized_end=4948, - methods=[ - _descriptor.MethodDescriptor( - name="ListQueues", - full_name="google.cloud.tasks.v2beta3.CloudTasks.ListQueues", - index=0, - containing_service=None, - input_type=_LISTQUEUESREQUEST, - output_type=_LISTQUEUESRESPONSE, - serialized_options=b"\202\323\344\223\0021\022//v2beta3/{parent=projects/*/locations/*}/queues\332A\006parent", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetQueue", - full_name="google.cloud.tasks.v2beta3.CloudTasks.GetQueue", - index=1, - containing_service=None, - input_type=_GETQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b"\202\323\344\223\0021\022//v2beta3/{name=projects/*/locations/*/queues/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CreateQueue", - full_name="google.cloud.tasks.v2beta3.CloudTasks.CreateQueue", - index=2, - containing_service=None, - input_type=_CREATEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b'\202\323\344\223\0028"//v2beta3/{parent=projects/*/locations/*}/queues:\005queue\332A\014parent,queue', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateQueue", - full_name="google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue", - index=3, - containing_service=None, - input_type=_UPDATEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b"\202\323\344\223\002>25/v2beta3/{queue.name=projects/*/locations/*/queues/*}:\005queue\332A\021queue,update_mask", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteQueue", - full_name="google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue", - index=4, - containing_service=None, - 
input_type=_DELETEQUEUEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\0021*//v2beta3/{name=projects/*/locations/*/queues/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="PurgeQueue", - full_name="google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue", - index=5, - containing_service=None, - input_type=_PURGEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b'\202\323\344\223\002:"5/v2beta3/{name=projects/*/locations/*/queues/*}:purge:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="PauseQueue", - full_name="google.cloud.tasks.v2beta3.CloudTasks.PauseQueue", - index=6, - containing_service=None, - input_type=_PAUSEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b'\202\323\344\223\002:"5/v2beta3/{name=projects/*/locations/*/queues/*}:pause:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ResumeQueue", - full_name="google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue", - index=7, - containing_service=None, - input_type=_RESUMEQUEUEREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2._QUEUE, - serialized_options=b'\202\323\344\223\002;"6/v2beta3/{name=projects/*/locations/*/queues/*}:resume:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetIamPolicy", - full_name="google.cloud.tasks.v2beta3.CloudTasks.GetIamPolicy", - index=8, - containing_service=None, - input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._GETIAMPOLICYREQUEST, - output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, - serialized_options=b'\202\323\344\223\002E"@/v2beta3/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\001*\332A\010resource', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="SetIamPolicy", - full_name="google.cloud.tasks.v2beta3.CloudTasks.SetIamPolicy", - index=9, - containing_service=None, - input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._SETIAMPOLICYREQUEST, - output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, - serialized_options=b'\202\323\344\223\002E"@/v2beta3/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\001*\332A\017resource,policy', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="TestIamPermissions", - full_name="google.cloud.tasks.v2beta3.CloudTasks.TestIamPermissions", - index=10, - containing_service=None, - input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSREQUEST, - output_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSRESPONSE, - serialized_options=b'\202\323\344\223\002K"F/v2beta3/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\001*\332A\024resource,permissions', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTasks", - full_name="google.cloud.tasks.v2beta3.CloudTasks.ListTasks", - index=11, - containing_service=None, - input_type=_LISTTASKSREQUEST, - output_type=_LISTTASKSRESPONSE, - serialized_options=b"\202\323\344\223\0029\0227/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks\332A\006parent", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - 
name="GetTask", - full_name="google.cloud.tasks.v2beta3.CloudTasks.GetTask", - index=12, - containing_service=None, - input_type=_GETTASKREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2._TASK, - serialized_options=b"\202\323\344\223\0029\0227/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CreateTask", - full_name="google.cloud.tasks.v2beta3.CloudTasks.CreateTask", - index=13, - containing_service=None, - input_type=_CREATETASKREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2._TASK, - serialized_options=b'\202\323\344\223\002<"7/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks:\001*\332A\013parent,task', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteTask", - full_name="google.cloud.tasks.v2beta3.CloudTasks.DeleteTask", - index=14, - containing_service=None, - input_type=_DELETETASKREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\0029*7/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="RunTask", - full_name="google.cloud.tasks.v2beta3.CloudTasks.RunTask", - index=15, - containing_service=None, - input_type=_RUNTASKREQUEST, - output_type=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2._TASK, - serialized_options=b'\202\323\344\223\002@";/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}:run:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_CLOUDTASKS) - -DESCRIPTOR.services_by_name["CloudTasks"] = _CLOUDTASKS - -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2_grpc.py b/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2_grpc.py deleted file mode 100644 index 477db97b..00000000 --- a/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2_grpc.py +++ /dev/null @@ -1,880 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from google.cloud.tasks_v2beta3.proto import ( - cloudtasks_pb2 as google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2, -) -from google.cloud.tasks_v2beta3.proto import ( - queue_pb2 as google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2, -) -from google.cloud.tasks_v2beta3.proto import ( - task_pb2 as google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2, -) -from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 -from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class CloudTasksStub(object): - """Cloud Tasks allows developers to manage the execution of background - work in their applications. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ListQueues = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/ListQueues", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.FromString, - ) - self.GetQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/GetQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.CreateQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/CreateQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.UpdateQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/UpdateQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.DeleteQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/DeleteQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.PurgeQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/PurgeQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.PauseQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/PauseQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.ResumeQueue = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/ResumeQueue", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.FromString, - ) - self.GetIamPolicy = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/GetIamPolicy", - request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, - ) - self.SetIamPolicy = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/SetIamPolicy", - request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, - ) - self.TestIamPermissions = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/TestIamPermissions", - request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString, - ) - self.ListTasks = 
channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/ListTasks", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.FromString, - ) - self.GetTask = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/GetTask", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2.Task.FromString, - ) - self.CreateTask = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/CreateTask", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2.Task.FromString, - ) - self.DeleteTask = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/DeleteTask", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.RunTask = channel.unary_unary( - "/google.cloud.tasks.v2beta3.CloudTasks/RunTask", - request_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2.Task.FromString, - ) - - -class CloudTasksServicer(object): - """Cloud Tasks allows developers to manage the execution of background - work in their applications. - """ - - def ListQueues(self, request, context): - """Lists queues. - - Queues are returned in lexicographical order. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetQueue(self, request, context): - """Gets a queue. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateQueue(self, request, context): - """Creates a queue. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless of whether - it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. - Read - [Overview of Queue Management and - queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using - this method. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateQueue(self, request, context): - """Updates a queue. - - This method creates the queue if it does not exist and updates - the queue if it does exist. - - Queues created with this method allow tasks to live for a maximum of 31 - days. After a task is 31 days old, the task will be deleted regardless of whether - it was dispatched or not. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. 
- Read - [Overview of Queue Management and - queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using - this method. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteQueue(self, request, context): - """Deletes a queue. - - This command will delete the queue even if it has tasks in it. - - Note: If you delete a queue, a queue with the same name can't be created - for 7 days. - - WARNING: Using this method may have unintended side effects if you are - using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. - Read - [Overview of Queue Management and - queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using - this method. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def PurgeQueue(self, request, context): - """Purges a queue by deleting all of its tasks. - - All tasks created before this method is called are permanently deleted. - - Purge operations can take up to one minute to take effect. Tasks - might be dispatched before the purge takes effect. A purge is irreversible. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def PauseQueue(self, request, context): - """Pauses the queue. - - If a queue is paused then the system will stop dispatching tasks - until the queue is resumed via - [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. Tasks can still be added - when the queue is paused. A queue is paused if its - [state][google.cloud.tasks.v2beta3.Queue.state] is [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ResumeQueue(self, request, context): - """Resume a queue. - - This method resumes a queue after it has been - [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or - [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The state of a queue is stored - in the queue's [state][google.cloud.tasks.v2beta3.Queue.state]; after calling this method it - will be set to [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. - - WARNING: Resuming many high-QPS queues at the same time can - lead to target overloading. If you are resuming high-QPS - queues, follow the 500/50/5 pattern described in - [Managing Cloud Tasks Scaling - Risks](https://cloud.google.com/tasks/docs/manage-cloud-task-scaling). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetIamPolicy(self, request, context): - """Gets the access control policy for a [Queue][google.cloud.tasks.v2beta3.Queue]. - Returns an empty policy if the resource exists and does not have a policy - set. 
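A minimal pause/resume sketch against the new client for the queue lifecycle described above (the queue name is a placeholder; resuming many high-QPS queues should follow the 500/50/5 ramp-up guidance):

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    name = "projects/my-project/locations/us-central1/queues/my-queue"

    queue = client.pause_queue(name=name)   # dispatching stops; tasks can still be added
    queue = client.resume_queue(name=name)  # state returns to RUNNING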
- - Authorization requires the following - [Google IAM](https://cloud.google.com/iam) permission on the specified - resource parent: - - * `cloudtasks.queues.getIamPolicy` - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def SetIamPolicy(self, request, context): - """Sets the access control policy for a [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing - policy. - - Note: The Cloud Console does not check queue-level IAM permissions yet. - Project-level permissions are required to use the Cloud Console. - - Authorization requires the following - [Google IAM](https://cloud.google.com/iam) permission on the specified - resource parent: - - * `cloudtasks.queues.setIamPolicy` - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def TestIamPermissions(self, request, context): - """Returns permissions that a caller has on a [Queue][google.cloud.tasks.v2beta3.Queue]. - If the resource does not exist, this will return an empty set of - permissions, not a [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. - - Note: This operation is designed to be used for building permission-aware - UIs and command-line tools, not for authorization checking. This operation - may "fail open" without warning. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTasks(self, request, context): - """Lists the tasks in a queue. - - By default, only the [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is retrieved - due to performance considerations; - [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] controls the - subset of information which is returned. - - The tasks may be returned in any order. The ordering may change at any - time. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetTask(self, request, context): - """Gets a task. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateTask(self, request, context): - """Creates a task and adds it to a queue. - - Tasks cannot be updated after creation; there is no UpdateTask command. - - * The maximum task size is 100KB. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteTask(self, request, context): - """Deletes a task. - - A task can be deleted if it is scheduled or dispatched. A task - cannot be deleted if it has executed successfully or permanently - failed. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def RunTask(self, request, context): - """Forces a task to run now. - - When this method is called, Cloud Tasks will dispatch the task, even if - the task is already running, the queue has reached its [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or - is [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. - - This command is meant to be used for manual debugging. 
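The queue-level IAM checks described above look roughly like this on the new client (the resource name and permission list are placeholders):

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    resource = "projects/my-project/locations/us-central1/queues/my-queue"

    # Returns only the subset of permissions the caller actually holds.
    response = client.test_iam_permissions(
        request={
            "resource": resource,
            "permissions": ["cloudtasks.queues.getIamPolicy", "cloudtasks.tasks.fullView"],
        }
    )
    print(list(response.permissions))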
For - example, [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be used to retry a failed - task after a fix has been made or to manually force a task to be - dispatched now. - - The dispatched task is returned. That is, the task that is returned - contains the [status][Task.status] after the task is dispatched but - before the task is received by its target. - - If Cloud Tasks receives a successful response from the task's - target, then the task will be deleted; otherwise the task's - [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] will be reset to the time that - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was called plus the retry delay specified - in the queue's [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. - - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns - [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a - task that has already succeeded or permanently failed. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_CloudTasksServicer_to_server(servicer, server): - rpc_method_handlers = { - "ListQueues": grpc.unary_unary_rpc_method_handler( - servicer.ListQueues, - request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.SerializeToString, - ), - "GetQueue": grpc.unary_unary_rpc_method_handler( - servicer.GetQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "CreateQueue": grpc.unary_unary_rpc_method_handler( - servicer.CreateQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "UpdateQueue": grpc.unary_unary_rpc_method_handler( - servicer.UpdateQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "DeleteQueue": grpc.unary_unary_rpc_method_handler( - servicer.DeleteQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "PurgeQueue": grpc.unary_unary_rpc_method_handler( - servicer.PurgeQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "PauseQueue": grpc.unary_unary_rpc_method_handler( - servicer.PauseQueue, - request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "ResumeQueue": grpc.unary_unary_rpc_method_handler( - servicer.ResumeQueue, - 
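A sketch of forcing a dispatch with RunTask as described above; NOT_FOUND is expected when the task has already succeeded or permanently failed (the task name is a placeholder):

    from google.api_core import exceptions
    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    name = "projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task"

    try:
        task = client.run_task(name=name)  # returns the task with its post-dispatch status
        print("dispatch requested for", task.name)
    except exceptions.NotFound:
        print("task already executed, permanently failed, or was deleted")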
request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.SerializeToString, - ), - "GetIamPolicy": grpc.unary_unary_rpc_method_handler( - servicer.GetIamPolicy, - request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString, - response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, - ), - "SetIamPolicy": grpc.unary_unary_rpc_method_handler( - servicer.SetIamPolicy, - request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString, - response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, - ), - "TestIamPermissions": grpc.unary_unary_rpc_method_handler( - servicer.TestIamPermissions, - request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString, - response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString, - ), - "ListTasks": grpc.unary_unary_rpc_method_handler( - servicer.ListTasks, - request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.SerializeToString, - ), - "GetTask": grpc.unary_unary_rpc_method_handler( - servicer.GetTask, - request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2.Task.SerializeToString, - ), - "CreateTask": grpc.unary_unary_rpc_method_handler( - servicer.CreateTask, - request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2.Task.SerializeToString, - ), - "DeleteTask": grpc.unary_unary_rpc_method_handler( - servicer.DeleteTask, - request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "RunTask": grpc.unary_unary_rpc_method_handler( - servicer.RunTask, - request_deserializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.FromString, - response_serializer=google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2.Task.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.tasks.v2beta3.CloudTasks", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. -class CloudTasks(object): - """Cloud Tasks allows developers to manage the execution of background - work in their applications. 
- """ - - @staticmethod - def ListQueues( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/ListQueues", - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/GetQueue", - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/CreateQueue", - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/UpdateQueue", - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/DeleteQueue", - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def PurgeQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/PurgeQueue", - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.SerializeToString, - 
google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def PauseQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/PauseQueue", - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ResumeQueue( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/ResumeQueue", - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_queue__pb2.Queue.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetIamPolicy( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/GetIamPolicy", - google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString, - google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def SetIamPolicy( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/SetIamPolicy", - google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString, - google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def TestIamPermissions( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/TestIamPermissions", - google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString, - google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTasks( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/ListTasks", - 
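These experimental module-level helpers are superseded by the generated clients; a rough asyncio sketch of an equivalent call on the new surface (the queue name is a placeholder):

    import asyncio
    from google.cloud import tasks_v2beta3

    async def main():
        client = tasks_v2beta3.CloudTasksAsyncClient()
        queue = await client.get_queue(
            name="projects/my-project/locations/us-central1/queues/my-queue"
        )
        print(queue.name, queue.state)

    asyncio.run(main())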
google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/GetTask", - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2.Task.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/CreateTask", - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2.Task.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/DeleteTask", - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def RunTask( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.tasks.v2beta3.CloudTasks/RunTask", - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.SerializeToString, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_task__pb2.Task.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) diff --git a/google/cloud/tasks_v2beta3/proto/queue_pb2.py b/google/cloud/tasks_v2beta3/proto/queue_pb2.py deleted file mode 100644 index 427354eb..00000000 --- a/google/cloud/tasks_v2beta3/proto/queue_pb2.py +++ /dev/null @@ -1,873 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/tasks_v2beta3/proto/queue.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.tasks_v2beta3.proto import ( - target_pb2 as google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_target__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/tasks_v2beta3/proto/queue.proto", - package="google.cloud.tasks.v2beta3", - syntax="proto3", - serialized_options=b"\n\036com.google.cloud.tasks.v2beta3B\nQueueProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasks", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n,google/cloud/tasks_v2beta3/proto/queue.proto\x12\x1agoogle.cloud.tasks.v2beta3\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a-google/cloud/tasks_v2beta3/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\xc4\x05\n\x05Queue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12O\n\x15\x61pp_engine_http_queue\x18\x03 \x01(\x0b\x32..google.cloud.tasks.v2beta3.AppEngineHttpQueueH\x00\x12;\n\x0brate_limits\x18\x04 \x01(\x0b\x32&.google.cloud.tasks.v2beta3.RateLimits\x12=\n\x0cretry_config\x18\x05 \x01(\x0b\x32\'.google.cloud.tasks.v2beta3.RetryConfig\x12\x36\n\x05state\x18\x06 \x01(\x0e\x32\'.google.cloud.tasks.v2beta3.Queue.State\x12.\n\npurge_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x1astackdriver_logging_config\x18\n \x01(\x0b\x32\x34.google.cloud.tasks.v2beta3.StackdriverLoggingConfig\x12\x39\n\x04type\x18\x0b \x01(\x0e\x32&.google.cloud.tasks.v2beta3.Queue.TypeB\x03\xe0\x41\x05"E\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03"0\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04PULL\x10\x01\x12\x08\n\x04PUSH\x10\x02:\\\xea\x41Y\n\x1f\x63loudtasks.googleapis.com/Queue\x12\x36projects/{project}/locations/{location}/queues/{queue}B\x0c\n\nqueue_type"j\n\nRateLimits\x12!\n\x19max_dispatches_per_second\x18\x01 \x01(\x01\x12\x16\n\x0emax_burst_size\x18\x02 \x01(\x05\x12!\n\x19max_concurrent_dispatches\x18\x03 \x01(\x05"\xd1\x01\n\x0bRetryConfig\x12\x14\n\x0cmax_attempts\x18\x01 \x01(\x05\x12\x35\n\x12max_retry_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12.\n\x0bmin_backoff\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12.\n\x0bmax_backoff\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x05 \x01(\x05"2\n\x18StackdriverLoggingConfig\x12\x16\n\x0esampling_ratio\x18\x01 \x01(\x01\x42o\n\x1e\x63om.google.cloud.tasks.v2beta3B\nQueueProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasksb\x06proto3', - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - 
google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_target__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_QUEUE_STATE = _descriptor.EnumDescriptor( - name="State", - full_name="google.cloud.tasks.v2beta3.Queue.State", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PAUSED", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DISABLED", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=760, - serialized_end=829, -) -_sym_db.RegisterEnumDescriptor(_QUEUE_STATE) - -_QUEUE_TYPE = _descriptor.EnumDescriptor( - name="Type", - full_name="google.cloud.tasks.v2beta3.Queue.Type", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PULL", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PUSH", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=831, - serialized_end=879, -) -_sym_db.RegisterEnumDescriptor(_QUEUE_TYPE) - - -_QUEUE = _descriptor.Descriptor( - name="Queue", - full_name="google.cloud.tasks.v2beta3.Queue", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta3.Queue.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="app_engine_http_queue", - full_name="google.cloud.tasks.v2beta3.Queue.app_engine_http_queue", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="rate_limits", - full_name="google.cloud.tasks.v2beta3.Queue.rate_limits", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
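The Queue, RateLimits and RetryConfig messages defined here are regenerated as proto-plus types; constructing them on the new surface looks roughly like this (all values are illustrative):

    from google.cloud import tasks_v2beta3
    from google.protobuf import duration_pb2

    queue = tasks_v2beta3.Queue(
        name="projects/my-project/locations/us-central1/queues/my-queue",
        rate_limits=tasks_v2beta3.RateLimits(
            max_dispatches_per_second=10,
            max_concurrent_dispatches=5,
        ),
        retry_config=tasks_v2beta3.RetryConfig(
            max_attempts=5,
            min_backoff=duration_pb2.Duration(seconds=1),
            max_backoff=duration_pb2.Duration(seconds=60),
            max_doublings=3,
        ),
    )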
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="retry_config", - full_name="google.cloud.tasks.v2beta3.Queue.retry_config", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.tasks.v2beta3.Queue.state", - index=4, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="purge_time", - full_name="google.cloud.tasks.v2beta3.Queue.purge_time", - index=5, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="stackdriver_logging_config", - full_name="google.cloud.tasks.v2beta3.Queue.stackdriver_logging_config", - index=6, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="type", - full_name="google.cloud.tasks.v2beta3.Queue.type", - index=7, - number=11, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\005", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_QUEUE_STATE, _QUEUE_TYPE,], - serialized_options=b"\352AY\n\037cloudtasks.googleapis.com/Queue\0226projects/{project}/locations/{location}/queues/{queue}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="queue_type", - full_name="google.cloud.tasks.v2beta3.Queue.queue_type", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=279, - serialized_end=987, -) - - -_RATELIMITS = _descriptor.Descriptor( - name="RateLimits", - full_name="google.cloud.tasks.v2beta3.RateLimits", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="max_dispatches_per_second", - full_name="google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second", - index=0, - number=1, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( 
- name="max_burst_size", - full_name="google.cloud.tasks.v2beta3.RateLimits.max_burst_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_concurrent_dispatches", - full_name="google.cloud.tasks.v2beta3.RateLimits.max_concurrent_dispatches", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=989, - serialized_end=1095, -) - - -_RETRYCONFIG = _descriptor.Descriptor( - name="RetryConfig", - full_name="google.cloud.tasks.v2beta3.RetryConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="max_attempts", - full_name="google.cloud.tasks.v2beta3.RetryConfig.max_attempts", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_retry_duration", - full_name="google.cloud.tasks.v2beta3.RetryConfig.max_retry_duration", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="min_backoff", - full_name="google.cloud.tasks.v2beta3.RetryConfig.min_backoff", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_backoff", - full_name="google.cloud.tasks.v2beta3.RetryConfig.max_backoff", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_doublings", - full_name="google.cloud.tasks.v2beta3.RetryConfig.max_doublings", - index=4, - number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], 
- serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1098, - serialized_end=1307, -) - - -_STACKDRIVERLOGGINGCONFIG = _descriptor.Descriptor( - name="StackdriverLoggingConfig", - full_name="google.cloud.tasks.v2beta3.StackdriverLoggingConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="sampling_ratio", - full_name="google.cloud.tasks.v2beta3.StackdriverLoggingConfig.sampling_ratio", - index=0, - number=1, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1309, - serialized_end=1359, -) - -_QUEUE.fields_by_name[ - "app_engine_http_queue" -].message_type = ( - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_target__pb2._APPENGINEHTTPQUEUE -) -_QUEUE.fields_by_name["rate_limits"].message_type = _RATELIMITS -_QUEUE.fields_by_name["retry_config"].message_type = _RETRYCONFIG -_QUEUE.fields_by_name["state"].enum_type = _QUEUE_STATE -_QUEUE.fields_by_name[ - "purge_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_QUEUE.fields_by_name[ - "stackdriver_logging_config" -].message_type = _STACKDRIVERLOGGINGCONFIG -_QUEUE.fields_by_name["type"].enum_type = _QUEUE_TYPE -_QUEUE_STATE.containing_type = _QUEUE -_QUEUE_TYPE.containing_type = _QUEUE -_QUEUE.oneofs_by_name["queue_type"].fields.append( - _QUEUE.fields_by_name["app_engine_http_queue"] -) -_QUEUE.fields_by_name["app_engine_http_queue"].containing_oneof = _QUEUE.oneofs_by_name[ - "queue_type" -] -_RETRYCONFIG.fields_by_name[ - "max_retry_duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_RETRYCONFIG.fields_by_name[ - "min_backoff" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_RETRYCONFIG.fields_by_name[ - "max_backoff" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -DESCRIPTOR.message_types_by_name["Queue"] = _QUEUE -DESCRIPTOR.message_types_by_name["RateLimits"] = _RATELIMITS -DESCRIPTOR.message_types_by_name["RetryConfig"] = _RETRYCONFIG -DESCRIPTOR.message_types_by_name["StackdriverLoggingConfig"] = _STACKDRIVERLOGGINGCONFIG -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Queue = _reflection.GeneratedProtocolMessageType( - "Queue", - (_message.Message,), - { - "DESCRIPTOR": _QUEUE, - "__module__": "google.cloud.tasks_v2beta3.proto.queue_pb2", - "__doc__": """A queue is a container of related tasks. Queues are configured to - manage how those tasks are dispatched. Configurable properties include - rate limits, retry options, queue types, and others. - - Attributes: - name: - Caller-specified and required in [CreateQueue][google.cloud.ta - sks.v2beta3.CloudTasks.CreateQueue], after which it becomes - output only. The queue name. The queue name must have the - following format: - ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), colons (:), or periods (.). 
For more - information, see `Identifying projects - `_ - ``LOCATION_ID`` - is the canonical ID for the queue’s location. The list of - available locations can be obtained by calling [ListLocatio - ns][google.cloud.location.Locations.ListLocations]. For - more information, see - https://cloud.google.com/about/locations/. - ``QUEUE_ID`` can - contain letters ([A-Za-z]), numbers ([0-9]), or hyphens - (-). The maximum length is 100 characters. - app_engine_http_queue: - [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQ - ueue] settings apply only to [App Engine - tasks][google.cloud.tasks.v2beta3.AppEngineHttpRequest] in - this queue. [Http - tasks][google.cloud.tasks.v2beta3.HttpRequest] are not - affected by this proto. - rate_limits: - Rate limits for task dispatches. - [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits] - and - [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] - are related because they both control task attempts. However - they control task attempts in different ways: - - [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits] - controls the total rate of dispatches from a queue - (i.e. all traffic dispatched from the queue, regardless of - whether the dispatch is from a first attempt or a retry). - - [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] - controls what happens to particular a task after its first - attempt fails. That is, - [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] - controls task retries (the second attempt, third attempt, - etc). The queue’s actual dispatch rate is the result of: - - Number of tasks in the queue - User-specified throttling: - [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits], - [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config], - and the [queue’s - state][google.cloud.tasks.v2beta3.Queue.state]. - System - throttling due to ``429`` (Too Many Requests) or ``503`` - (Service Unavailable) responses from the worker, high error - rates, or to smooth sudden large traffic spikes. - retry_config: - Settings that determine the retry behavior. - For tasks - created using Cloud Tasks: the queue-level retry settings - apply to all tasks in the queue that were created using Cloud - Tasks. Retry settings cannot be set on individual tasks. - - For tasks created using the App Engine SDK: the queue-level - retry settings apply to all tasks in the queue which do not - have retry settings explicitly set on the task and were - created by the App Engine SDK. See `App Engine - documentation `_. - state: - Output only. The state of the queue. ``state`` can only be - changed by called [PauseQueue][google.cloud.tasks.v2beta3.Clou - dTasks.PauseQueue], [ResumeQueue][google.cloud.tasks.v2beta3.C - loudTasks.ResumeQueue], or uploading `queue.yaml/xml `_. [U - pdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] - cannot be used to change ``state``. - purge_time: - Output only. The last time this queue was purged. All tasks - that were - [created][google.cloud.tasks.v2beta3.Task.create_time] before - this time were purged. A queue can be purged using [PurgeQueu - e][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue], the `App - Engine Task Queue SDK, or the Cloud Console `_. Purge - time will be truncated to the nearest microsecond. Purge time - will be unset if the queue has never been purged. - stackdriver_logging_config: - Configuration options for writing logs to `Stackdriver Logging - `_. If this field is - unset, then no logs are written. - type: - Immutable. 
The type of a queue (push or pull). ``Queue.type`` - is an immutable property of the queue that is set at the queue - creation time. When left unspecified, the default value of - ``PUSH`` is selected. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.Queue) - }, -) -_sym_db.RegisterMessage(Queue) - -RateLimits = _reflection.GeneratedProtocolMessageType( - "RateLimits", - (_message.Message,), - { - "DESCRIPTOR": _RATELIMITS, - "__module__": "google.cloud.tasks_v2beta3.proto.queue_pb2", - "__doc__": """Rate limits. This message determines the maximum rate that tasks can - be dispatched by a queue, regardless of whether the dispatch is a - first task attempt or a retry. Note: The debugging command, - [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask], will run a - task even if the queue has reached its - [RateLimits][google.cloud.tasks.v2beta3.RateLimits]. - - Attributes: - max_dispatches_per_second: - The maximum rate at which tasks are dispatched from this - queue. If unspecified when the queue is created, Cloud Tasks - will pick the default. - For [App Engine - queues][google.cloud.tasks.v2beta3.AppEngineHttpQueue], the - maximum allowed value is 500. This field has the same - meaning as `rate in queue.yaml/xml `_. - max_burst_size: - Output only. The max burst size. Max burst size limits how - fast tasks in queue are processed when many tasks are in the - queue and the rate is high. This field allows the queue to - have a high rate so processing starts shortly after a task is - enqueued, but still limits resource usage when many tasks are - enqueued in a short period of time. The `token bucket - `_ algorithm is used - to control the rate of task dispatches. Each queue has a token - bucket that holds tokens, up to the maximum specified by - ``max_burst_size``. Each time a task is dispatched, a token is - removed from the bucket. Tasks will be dispatched until the - queue’s bucket runs out of tokens. The bucket will be - continuously refilled with new tokens based on [max_dispatches - _per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatc - hes_per_second]. Cloud Tasks will pick the value of - ``max_burst_size`` based on the value of [max_dispatches_per_s - econd][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_pe - r_second]. For App Engine queues that were created or updated - using ``queue.yaml/xml``, ``max_burst_size`` is equal to - `bucket_size `_. Since - ``max_burst_size`` is output only, if [UpdateQueue][google.clo - ud.tasks.v2beta3.CloudTasks.UpdateQueue] is called on a queue - created by ``queue.yaml/xml``, ``max_burst_size`` will be - reset based on the value of [max_dispatches_per_second][google - .cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second], - regardless of whether [max_dispatches_per_second][google.cloud - .tasks.v2beta3.RateLimits.max_dispatches_per_second] is - updated. - max_concurrent_dispatches: - The maximum number of concurrent tasks that Cloud Tasks allows - to be dispatched for this queue. After this threshold has been - reached, Cloud Tasks stops dispatching tasks until the number - of concurrent requests decreases. If unspecified when the - queue is created, Cloud Tasks will pick the default. The - maximum allowed value is 5,000. This field has the same - meaning as `max_concurrent_requests in queue.yaml/xml `_. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.RateLimits) - }, -) -_sym_db.RegisterMessage(RateLimits) - -RetryConfig = _reflection.GeneratedProtocolMessageType( - "RetryConfig", - (_message.Message,), - { - "DESCRIPTOR": _RETRYCONFIG, - "__module__": "google.cloud.tasks_v2beta3.proto.queue_pb2", - "__doc__": """Retry config. These settings determine when a failed task attempt is - retried. - - Attributes: - max_attempts: - Number of attempts per task. Cloud Tasks will attempt the - task ``max_attempts`` times (that is, if the first attempt - fails, then there will be ``max_attempts - 1`` retries). Must - be >= -1. If unspecified when the queue is created, Cloud - Tasks will pick the default. -1 indicates unlimited attempts. - This field has the same meaning as `task_retry_limit in - queue.yaml/xml `_. - max_retry_duration: - If positive, ``max_retry_duration`` specifies the time limit - for retrying a failed task, measured from when the task was - first attempted. Once ``max_retry_duration`` time has passed - *and* the task has been attempted [max_attempts][google.cloud. - tasks.v2beta3.RetryConfig.max_attempts] times, no further - attempts will be made and the task will be deleted. If zero, - then the task age is unlimited. If unspecified when the queue - is created, Cloud Tasks will pick the default. - ``max_retry_duration`` will be truncated to the nearest - second. This field has the same meaning as `task_age_limit in - queue.yaml/xml `_. - min_backoff: - A task will be - [scheduled][google.cloud.tasks.v2beta3.Task.schedule_time] for - retry between [min_backoff][google.cloud.tasks.v2beta3.RetryCo - nfig.min_backoff] and [max_backoff][google.cloud.tasks.v2beta3 - .RetryConfig.max_backoff] duration after it fails, if the - queue’s [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig] - specifies that the task should be retried. If unspecified - when the queue is created, Cloud Tasks will pick the default. - ``min_backoff`` will be truncated to the nearest second. This - field has the same meaning as `min_backoff_seconds in - queue.yaml/xml `_. - max_backoff: - A task will be - [scheduled][google.cloud.tasks.v2beta3.Task.schedule_time] for - retry between [min_backoff][google.cloud.tasks.v2beta3.RetryCo - nfig.min_backoff] and [max_backoff][google.cloud.tasks.v2beta3 - .RetryConfig.max_backoff] duration after it fails, if the - queue’s [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig] - specifies that the task should be retried. If unspecified - when the queue is created, Cloud Tasks will pick the default. - ``max_backoff`` will be truncated to the nearest second. This - field has the same meaning as `max_backoff_seconds in - queue.yaml/xml `_. - max_doublings: - The time between retries will double ``max_doublings`` times. - A task’s retry interval starts at [min_backoff][google.cloud.t - asks.v2beta3.RetryConfig.min_backoff], then doubles - ``max_doublings`` times, then increases linearly, and finally - retries at intervals of [max_backoff][google.cloud.tasks.v2bet - a3.RetryConfig.max_backoff] up to [max_attempts][google.cloud. - tasks.v2beta3.RetryConfig.max_attempts] times. For example, - if [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_ba - ckoff] is 10s, [max_backoff][google.cloud.tasks.v2beta3.RetryC - onfig.max_backoff] is 300s, and ``max_doublings`` is 3, then - the a task will first be retried in 10s. The retry interval - will double three times, and then increase linearly by 2^3 \* - 10s. 
Finally, the task will retry at intervals of [max_backoff - ][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] until - the task has been attempted [max_attempts][google.cloud.tasks. - v2beta3.RetryConfig.max_attempts] times. Thus, the requests - will retry at 10s, 20s, 40s, 80s, 160s, 240s, 300s, 300s, …. - If unspecified when the queue is created, Cloud Tasks will - pick the default. This field has the same meaning as - `max_doublings in queue.yaml/xml ` - __. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.RetryConfig) - }, -) -_sym_db.RegisterMessage(RetryConfig) - -StackdriverLoggingConfig = _reflection.GeneratedProtocolMessageType( - "StackdriverLoggingConfig", - (_message.Message,), - { - "DESCRIPTOR": _STACKDRIVERLOGGINGCONFIG, - "__module__": "google.cloud.tasks_v2beta3.proto.queue_pb2", - "__doc__": """Configuration options for writing logs to `Stackdriver Logging - `_. - - Attributes: - sampling_ratio: - Specifies the fraction of operations to write to `Stackdriver - Logging `_. This - field may contain any value between 0.0 and 1.0, inclusive. - 0.0 is the default and means that no operations are logged. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.StackdriverLoggingConfig) - }, -) -_sym_db.RegisterMessage(StackdriverLoggingConfig) - - -DESCRIPTOR._options = None -_QUEUE.fields_by_name["type"]._options = None -_QUEUE._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/tasks_v2beta3/proto/queue_pb2_grpc.py b/google/cloud/tasks_v2beta3/proto/queue_pb2_grpc.py deleted file mode 100644 index 8a939394..00000000 --- a/google/cloud/tasks_v2beta3/proto/queue_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/tasks_v2beta3/proto/target_pb2.py b/google/cloud/tasks_v2beta3/proto/target_pb2.py deleted file mode 100644 index c3df2e3a..00000000 --- a/google/cloud/tasks_v2beta3/proto/target_pb2.py +++ /dev/null @@ -1,1213 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
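# A minimal sketch of how the queue settings documented above (RateLimits and
# RetryConfig with doubling backoff) are expressed with the microgen surface
# that replaces this deleted module. Project, location, and queue IDs are
# placeholders; the numeric values are illustrative only, not recommendations.
from google.cloud import tasks_v2beta3
from google.protobuf import duration_pb2

client = tasks_v2beta3.CloudTasksClient()
parent = "projects/my-project/locations/us-central1"

queue = tasks_v2beta3.Queue(
    name=client.queue_path("my-project", "us-central1", "my-queue"),
    rate_limits=tasks_v2beta3.RateLimits(
        max_dispatches_per_second=100.0,
        max_concurrent_dispatches=50,
    ),
    retry_config=tasks_v2beta3.RetryConfig(
        max_attempts=5,
        min_backoff=duration_pb2.Duration(seconds=10),   # first retry ~10s after failure
        max_backoff=duration_pb2.Duration(seconds=300),  # retry interval caps at 300s
        max_doublings=3,  # 10s, 20s, 40s, 80s, then linear 80s steps up to 300s
    ),
)

created = client.create_queue(parent=parent, queue=queue)
print(created.name, created.state)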
-# source: google/cloud/tasks_v2beta3/proto/target.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/tasks_v2beta3/proto/target.proto", - package="google.cloud.tasks.v2beta3", - syntax="proto3", - serialized_options=b"\n\036com.google.cloud.tasks.v2beta3B\013TargetProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasks", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n-google/cloud/tasks_v2beta3/proto/target.proto\x12\x1agoogle.cloud.tasks.v2beta3\x1a\x1cgoogle/api/annotations.proto"\xf0\x02\n\x0bHttpRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\x12;\n\x0bhttp_method\x18\x02 \x01(\x0e\x32&.google.cloud.tasks.v2beta3.HttpMethod\x12\x45\n\x07headers\x18\x03 \x03(\x0b\x32\x34.google.cloud.tasks.v2beta3.HttpRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12=\n\x0boauth_token\x18\x05 \x01(\x0b\x32&.google.cloud.tasks.v2beta3.OAuthTokenH\x00\x12;\n\noidc_token\x18\x06 \x01(\x0b\x32%.google.cloud.tasks.v2beta3.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"g\n\x12\x41ppEngineHttpQueue\x12Q\n\x1b\x61pp_engine_routing_override\x18\x01 \x01(\x0b\x32,.google.cloud.tasks.v2beta3.AppEngineRouting"\xc1\x02\n\x14\x41ppEngineHttpRequest\x12;\n\x0bhttp_method\x18\x01 \x01(\x0e\x32&.google.cloud.tasks.v2beta3.HttpMethod\x12H\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32,.google.cloud.tasks.v2beta3.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12N\n\x07headers\x18\x04 \x03(\x0b\x32=.google.cloud.tasks.v2beta3.AppEngineHttpRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42p\n\x1e\x63om.google.cloud.tasks.v2beta3B\x0bTargetProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasksb\x06proto3', - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], -) - -_HTTPMETHOD = _descriptor.EnumDescriptor( - name="HttpMethod", - full_name="google.cloud.tasks.v2beta3.HttpMethod", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="HTTP_METHOD_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="POST", - 
index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GET", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="HEAD", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PUT", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DELETE", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PATCH", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="OPTIONS", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1115, - serialized_end=1230, -) -_sym_db.RegisterEnumDescriptor(_HTTPMETHOD) - -HttpMethod = enum_type_wrapper.EnumTypeWrapper(_HTTPMETHOD) -HTTP_METHOD_UNSPECIFIED = 0 -POST = 1 -GET = 2 -HEAD = 3 -PUT = 4 -DELETE = 5 -PATCH = 6 -OPTIONS = 7 - - -_HTTPREQUEST_HEADERSENTRY = _descriptor.Descriptor( - name="HeadersEntry", - full_name="google.cloud.tasks.v2beta3.HttpRequest.HeadersEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.tasks.v2beta3.HttpRequest.HeadersEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.tasks.v2beta3.HttpRequest.HeadersEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=406, - serialized_end=452, -) - -_HTTPREQUEST = _descriptor.Descriptor( - name="HttpRequest", - full_name="google.cloud.tasks.v2beta3.HttpRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="url", - full_name="google.cloud.tasks.v2beta3.HttpRequest.url", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="http_method", - 
full_name="google.cloud.tasks.v2beta3.HttpRequest.http_method", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="headers", - full_name="google.cloud.tasks.v2beta3.HttpRequest.headers", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="body", - full_name="google.cloud.tasks.v2beta3.HttpRequest.body", - index=3, - number=4, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="oauth_token", - full_name="google.cloud.tasks.v2beta3.HttpRequest.oauth_token", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="oidc_token", - full_name="google.cloud.tasks.v2beta3.HttpRequest.oidc_token", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_HTTPREQUEST_HEADERSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="authorization_header", - full_name="google.cloud.tasks.v2beta3.HttpRequest.authorization_header", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=108, - serialized_end=476, -) - - -_APPENGINEHTTPQUEUE = _descriptor.Descriptor( - name="AppEngineHttpQueue", - full_name="google.cloud.tasks.v2beta3.AppEngineHttpQueue", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="app_engine_routing_override", - full_name="google.cloud.tasks.v2beta3.AppEngineHttpQueue.app_engine_routing_override", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=478, - serialized_end=581, -) - - -_APPENGINEHTTPREQUEST_HEADERSENTRY = 
_descriptor.Descriptor( - name="HeadersEntry", - full_name="google.cloud.tasks.v2beta3.AppEngineHttpRequest.HeadersEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.tasks.v2beta3.AppEngineHttpRequest.HeadersEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.tasks.v2beta3.AppEngineHttpRequest.HeadersEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=406, - serialized_end=452, -) - -_APPENGINEHTTPREQUEST = _descriptor.Descriptor( - name="AppEngineHttpRequest", - full_name="google.cloud.tasks.v2beta3.AppEngineHttpRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="http_method", - full_name="google.cloud.tasks.v2beta3.AppEngineHttpRequest.http_method", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="app_engine_routing", - full_name="google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="relative_uri", - full_name="google.cloud.tasks.v2beta3.AppEngineHttpRequest.relative_uri", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="headers", - full_name="google.cloud.tasks.v2beta3.AppEngineHttpRequest.headers", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="body", - full_name="google.cloud.tasks.v2beta3.AppEngineHttpRequest.body", - index=4, - 
number=5, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_APPENGINEHTTPREQUEST_HEADERSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=584, - serialized_end=905, -) - - -_APPENGINEROUTING = _descriptor.Descriptor( - name="AppEngineRouting", - full_name="google.cloud.tasks.v2beta3.AppEngineRouting", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="service", - full_name="google.cloud.tasks.v2beta3.AppEngineRouting.service", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.cloud.tasks.v2beta3.AppEngineRouting.version", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="instance", - full_name="google.cloud.tasks.v2beta3.AppEngineRouting.instance", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="host", - full_name="google.cloud.tasks.v2beta3.AppEngineRouting.host", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=907, - serialized_end=991, -) - - -_OAUTHTOKEN = _descriptor.Descriptor( - name="OAuthToken", - full_name="google.cloud.tasks.v2beta3.OAuthToken", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="service_account_email", - full_name="google.cloud.tasks.v2beta3.OAuthToken.service_account_email", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="scope", - 
full_name="google.cloud.tasks.v2beta3.OAuthToken.scope", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=993, - serialized_end=1051, -) - - -_OIDCTOKEN = _descriptor.Descriptor( - name="OidcToken", - full_name="google.cloud.tasks.v2beta3.OidcToken", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="service_account_email", - full_name="google.cloud.tasks.v2beta3.OidcToken.service_account_email", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="audience", - full_name="google.cloud.tasks.v2beta3.OidcToken.audience", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1053, - serialized_end=1113, -) - -_HTTPREQUEST_HEADERSENTRY.containing_type = _HTTPREQUEST -_HTTPREQUEST.fields_by_name["http_method"].enum_type = _HTTPMETHOD -_HTTPREQUEST.fields_by_name["headers"].message_type = _HTTPREQUEST_HEADERSENTRY -_HTTPREQUEST.fields_by_name["oauth_token"].message_type = _OAUTHTOKEN -_HTTPREQUEST.fields_by_name["oidc_token"].message_type = _OIDCTOKEN -_HTTPREQUEST.oneofs_by_name["authorization_header"].fields.append( - _HTTPREQUEST.fields_by_name["oauth_token"] -) -_HTTPREQUEST.fields_by_name[ - "oauth_token" -].containing_oneof = _HTTPREQUEST.oneofs_by_name["authorization_header"] -_HTTPREQUEST.oneofs_by_name["authorization_header"].fields.append( - _HTTPREQUEST.fields_by_name["oidc_token"] -) -_HTTPREQUEST.fields_by_name[ - "oidc_token" -].containing_oneof = _HTTPREQUEST.oneofs_by_name["authorization_header"] -_APPENGINEHTTPQUEUE.fields_by_name[ - "app_engine_routing_override" -].message_type = _APPENGINEROUTING -_APPENGINEHTTPREQUEST_HEADERSENTRY.containing_type = _APPENGINEHTTPREQUEST -_APPENGINEHTTPREQUEST.fields_by_name["http_method"].enum_type = _HTTPMETHOD -_APPENGINEHTTPREQUEST.fields_by_name[ - "app_engine_routing" -].message_type = _APPENGINEROUTING -_APPENGINEHTTPREQUEST.fields_by_name[ - "headers" -].message_type = _APPENGINEHTTPREQUEST_HEADERSENTRY -DESCRIPTOR.message_types_by_name["HttpRequest"] = _HTTPREQUEST -DESCRIPTOR.message_types_by_name["AppEngineHttpQueue"] = _APPENGINEHTTPQUEUE -DESCRIPTOR.message_types_by_name["AppEngineHttpRequest"] = _APPENGINEHTTPREQUEST -DESCRIPTOR.message_types_by_name["AppEngineRouting"] = _APPENGINEROUTING 
-DESCRIPTOR.message_types_by_name["OAuthToken"] = _OAUTHTOKEN -DESCRIPTOR.message_types_by_name["OidcToken"] = _OIDCTOKEN -DESCRIPTOR.enum_types_by_name["HttpMethod"] = _HTTPMETHOD -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -HttpRequest = _reflection.GeneratedProtocolMessageType( - "HttpRequest", - (_message.Message,), - { - "HeadersEntry": _reflection.GeneratedProtocolMessageType( - "HeadersEntry", - (_message.Message,), - { - "DESCRIPTOR": _HTTPREQUEST_HEADERSENTRY, - "__module__": "google.cloud.tasks_v2beta3.proto.target_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.HttpRequest.HeadersEntry) - }, - ), - "DESCRIPTOR": _HTTPREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.target_pb2", - "__doc__": """HTTP request. The task will be pushed to the worker as an HTTP - request. If the worker or the redirected worker acknowledges the task - by returning a successful HTTP response code ([``200`` - ``299``]), - the task will be removed from the queue. If any other HTTP response - code is returned or no response is received, the task will be retried - according to the following: - User-specified throttling: [retry - configuration][google.cloud.tasks.v2beta3.Queue.retry_config], [rate - limits][google.cloud.tasks.v2beta3.Queue.rate_limits], and the - [queue’s state][google.cloud.tasks.v2beta3.Queue.state]. - System - throttling: To prevent the worker from overloading, Cloud Tasks may - temporarily reduce the queue’s effective rate. User-specified - settings will not be changed. System throttling happens because: - - Cloud Tasks backs off on all errors. Normally the backoff specified - in [rate limits][google.cloud.tasks.v2beta3.Queue.rate_limits] will - be used. But if the worker returns ``429`` (Too Many Requests), - ``503`` (Service Unavailable), or the rate of errors is high, Cloud - Tasks will use a higher backoff rate. The retry specified in the - ``Retry-After`` HTTP response header is considered. - To prevent - traffic spikes and to smooth sudden increases in traffic, - dispatches ramp up slowly when the queue is newly created or idle and - if large numbers of tasks suddenly become available to dispatch (due - to spikes in create task rates, the queue being unpaused, or many - tasks that are scheduled at the same time). - - Attributes: - url: - Required. The full url path that the request will be sent to. - This string must begin with either “http://” or “https://”. - Some examples are: ``http://acme.com`` and - ``https://acme.com/sales:8080``. Cloud Tasks will encode some - characters for safety and compatibility. The maximum allowed - URL length is 2083 characters after encoding. The - ``Location`` header response from a redirect response [``300`` - - ``399``] may be followed. The redirect is not counted as a - separate attempt. - http_method: - The HTTP method to use for the request. The default is POST. - headers: - HTTP request headers. This map contains the header field - names and values. Headers can be set when the [task is - created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - These headers represent a subset of the headers that will - accompany the task’s HTTP request. Some HTTP request headers - will be ignored or replaced. A partial list of headers that - will be ignored or replaced is: - Host: This will be - computed by Cloud Tasks and derived from - [HttpRequest.url][google.cloud.tasks.v2beta3.HttpRequest.url]. - - Content-Length: This will be computed by Cloud Tasks. - - User-Agent: This will be set to ``"Google-Cloud-Tasks"``. 
- - X-Google-*: Google use only. - X-AppEngine-*: Google use - only. ``Content-Type`` won’t be set by Cloud Tasks. You can - explicitly set ``Content-Type`` to a media type when the [task - is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - For example, ``Content-Type`` can be set to - ``"application/octet-stream"`` or ``"application/json"``. - Headers which can have multiple values (according to RFC2616) - can be specified using comma-separated values. The size of - the headers must be less than 80KB. - body: - HTTP request body. A request body is allowed only if the - [HTTP - method][google.cloud.tasks.v2beta3.HttpRequest.http_method] is - POST, PUT, or PATCH. It is an error to set body on a task with - an incompatible - [HttpMethod][google.cloud.tasks.v2beta3.HttpMethod]. - authorization_header: - The mode for generating an ``Authorization`` header for HTTP - requests. If specified, all ``Authorization`` headers in the - [HttpRequest.headers][google.cloud.tasks.v2beta3.HttpRequest.h - eaders] field will be overridden. - oauth_token: - If specified, an `OAuth token - `\_ - will be generated and attached as an ``Authorization`` header - in the HTTP request. This type of authorization should - generally only be used when calling Google APIs hosted on - \*.googleapis.com. - oidc_token: - If specified, an `OIDC `_ token will be generated and - attached as an ``Authorization`` header in the HTTP request. - This type of authorization can be used for many scenarios, - including calling Cloud Run, or endpoints where you intend to - validate the token yourself. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.HttpRequest) - }, -) -_sym_db.RegisterMessage(HttpRequest) -_sym_db.RegisterMessage(HttpRequest.HeadersEntry) - -AppEngineHttpQueue = _reflection.GeneratedProtocolMessageType( - "AppEngineHttpQueue", - (_message.Message,), - { - "DESCRIPTOR": _APPENGINEHTTPQUEUE, - "__module__": "google.cloud.tasks_v2beta3.proto.target_pb2", - "__doc__": """App Engine HTTP queue. The task will be delivered to the App Engine - application hostname specified by its - [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] - and [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpReq - uest]. The documentation for [AppEngineHttpRequest][google.cloud.tasks - .v2beta3.AppEngineHttpRequest] explains how the task’s host URL is - constructed. Using - [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] - requires ```appengine.applications.get`` - `\_ - Google IAM permission for the project and the following scope: - ``https://www.googleapis.com/auth/cloud-platform`` - - Attributes: - app_engine_routing_override: - Overrides for the [task-level app_engine_routing][google.cloud - .tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. If - set, ``app_engine_routing_override`` is used for all tasks in - the queue, no matter what the setting is for the [task-level a - pp_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpReq - uest.app_engine_routing]. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.AppEngineHttpQueue) - }, -) -_sym_db.RegisterMessage(AppEngineHttpQueue) - -AppEngineHttpRequest = _reflection.GeneratedProtocolMessageType( - "AppEngineHttpRequest", - (_message.Message,), - { - "HeadersEntry": _reflection.GeneratedProtocolMessageType( - "HeadersEntry", - (_message.Message,), - { - "DESCRIPTOR": _APPENGINEHTTPREQUEST_HEADERSENTRY, - "__module__": "google.cloud.tasks_v2beta3.proto.target_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.AppEngineHttpRequest.HeadersEntry) - }, - ), - "DESCRIPTOR": _APPENGINEHTTPREQUEST, - "__module__": "google.cloud.tasks_v2beta3.proto.target_pb2", - "__doc__": """App Engine HTTP request. The message defines the HTTP request that is - sent to an App Engine app when the task is dispatched. Using [AppEngi - neHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] - requires ```appengine.applications.get`` - `\_ - Google IAM permission for the project and the following scope: - ``https://www.googleapis.com/auth/cloud-platform`` The task will be - delivered to the App Engine app which belongs to the same project as - the queue. For more information, see `How Requests are Routed - `_ and how routing is affected by `dispatch files `_. - Traffic is encrypted during transport and never leaves Google - datacenters. Because this traffic is carried over a communication - mechanism internal to Google, you cannot explicitly set the protocol - (for example, HTTP or HTTPS). The request to the handler, however, - will appear to have used the HTTP protocol. The - [AppEngineRouting][google.cloud.tasks.v2beta3.AppEngineRouting] used - to construct the URL that the task is delivered to can be set at the - queue-level or task-level: - If set, [app_engine_routing_override - ][google.cloud.tasks.v2beta3.AppEngineHttpQueue.app_engine_routing_ove - rride] is used for all tasks in the queue, no matter what the - setting is for the [task-level app_engine_routing][google.cloud. - tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. The ``url`` - that the task will be sent to is: - ``url =`` - [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] ``+`` [ - relative_uri][google.cloud.tasks.v2beta3.AppEngineHttpRequest.relative - _uri] Tasks can be dispatched to secure app handlers, unsecure app - handlers, and URIs restricted with ```login: admin`` `_. Because - tasks are not run as any user, they cannot be dispatched to URIs - restricted with ```login: required`` `_ Task dispatches also do not - follow redirects. The task attempt has succeeded if the app’s request - handler returns an HTTP response code in the range [``200`` - - ``299``]. The task attempt has failed if the app’s handler returns a - non-2xx response code or Cloud Tasks does not receive response before - the [deadline][google.cloud.tasks.v2beta3.Task.dispatch_deadline]. - Failed tasks will be retried according to the [retry - configuration][google.cloud.tasks.v2beta3.Queue.retry_config]. ``503`` - (Service Unavailable) is considered an App Engine system error instead - of an application error and will cause Cloud Tasks’ traffic congestion - control to temporarily throttle the queue’s dispatches. Unlike other - types of task targets, a ``429`` (Too Many Requests) response from an - app handler does not cause traffic congestion control to throttle the - queue. - - Attributes: - http_method: - The HTTP method to use for the request. The default is POST. 
- The app’s request handler for the task’s target URL must be - able to handle HTTP requests with this http_method, otherwise - the task attempt fails with error code 405 (Method Not - Allowed). See `Writing a push task request handler `_ and the App - Engine documentation for your runtime on `How Requests are - Handled - `_. - app_engine_routing: - Task-level setting for App Engine routing. If set, [app_engin - e_routing_override][google.cloud.tasks.v2beta3.AppEngineHttpQu - eue.app_engine_routing_override] is used for all tasks in the - queue, no matter what the setting is for the [task-level app_e - ngine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest - .app_engine_routing]. - relative_uri: - The relative URI. The relative URI must begin with “/” and - must be a valid HTTP relative URI. It can contain a path and - query string arguments. If the relative URI is empty, then the - root path “/” will be used. No spaces are allowed, and the - maximum length allowed is 2083 characters. - headers: - HTTP request headers. This map contains the header field - names and values. Headers can be set when the [task is - created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - Repeated headers are not supported but a header value can - contain commas. Cloud Tasks sets some headers to default - values: - ``User-Agent``: By default, this header is - ``"AppEngine-Google; (+http://code.google.com/appengine)"``. - This header can be modified, but Cloud Tasks will append - ``"AppEngine-Google; (+http://code.google.com/appengine)"`` to - the modified ``User-Agent``. If the task has a - [body][google.cloud.tasks.v2beta3.AppEngineHttpRequest.body], - Cloud Tasks sets the following headers: - ``Content-Type``: - By default, the ``Content-Type`` header is set to - ``"application/octet-stream"``. The default can be overridden - by explicitly setting ``Content-Type`` to a particular - media type when the [task is - created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. - For example, ``Content-Type`` can be set to - ``"application/json"``. - ``Content-Length``: This is - computed by Cloud Tasks. This value is output only. It - cannot be changed. The headers below cannot be set or - overridden: - ``Host`` - ``X-Google-*`` - - ``X-AppEngine-*`` In addition, Cloud Tasks sets some headers - when the task is dispatched, such as headers containing - information about the task; see `request headers - `_. These headers are set - only when the task is dispatched, so they are not visible when - the task is returned in a Cloud Tasks response. Although - there is no specific limit for the maximum number of headers - or the size, there is a limit on the maximum size of the - [Task][google.cloud.tasks.v2beta3.Task]. For more information, - see the - [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] - documentation. - body: - HTTP request body. A request body is allowed only if the HTTP - method is POST or PUT. It is an error to set a body on a task - with an incompatible - [HttpMethod][google.cloud.tasks.v2beta3.HttpMethod]. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.AppEngineHttpRequest) - }, -) -_sym_db.RegisterMessage(AppEngineHttpRequest) -_sym_db.RegisterMessage(AppEngineHttpRequest.HeadersEntry) - -AppEngineRouting = _reflection.GeneratedProtocolMessageType( - "AppEngineRouting", - (_message.Message,), - { - "DESCRIPTOR": _APPENGINEROUTING, - "__module__": "google.cloud.tasks_v2beta3.proto.target_pb2", - "__doc__": """App Engine Routing. 
Defines routing characteristics specific to App - Engine - service, version, and instance. For more information about - services, versions, and instances see `An Overview of App Engine - `_, `Microservices Architecture on Google App Engine - `_, `App Engine Standard request routing - `_, and `App Engine Flex request routing - `_. - - Attributes: - service: - App service. By default, the task is sent to the service - which is the default service when the task is attempted. For - some queues or tasks which were created using the App Engine - Task Queue API, - [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is - not parsable into [service][google.cloud.tasks.v2beta3.AppEngi - neRouting.service], [version][google.cloud.tasks.v2beta3.AppEn - gineRouting.version], and [instance][google.cloud.tasks.v2beta - 3.AppEngineRouting.instance]. For example, some tasks which - were created using the App Engine SDK use a custom domain - name; custom domains are not parsed by Cloud Tasks. If - [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is - not parsable, then [service][google.cloud.tasks.v2beta3.AppEng - ineRouting.service], [version][google.cloud.tasks.v2beta3.AppE - ngineRouting.version], and [instance][google.cloud.tasks.v2bet - a3.AppEngineRouting.instance] are the empty string. - version: - App version. By default, the task is sent to the version - which is the default version when the task is attempted. For - some queues or tasks which were created using the App Engine - Task Queue API, - [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is - not parsable into [service][google.cloud.tasks.v2beta3.AppEngi - neRouting.service], [version][google.cloud.tasks.v2beta3.AppEn - gineRouting.version], and [instance][google.cloud.tasks.v2beta - 3.AppEngineRouting.instance]. For example, some tasks which - were created using the App Engine SDK use a custom domain - name; custom domains are not parsed by Cloud Tasks. If - [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is - not parsable, then [service][google.cloud.tasks.v2beta3.AppEng - ineRouting.service], [version][google.cloud.tasks.v2beta3.AppE - ngineRouting.version], and [instance][google.cloud.tasks.v2bet - a3.AppEngineRouting.instance] are the empty string. - instance: - App instance. By default, the task is sent to an instance - which is available when the task is attempted. Requests can - only be sent to a specific instance if `manual scaling is used - in App Engine Standard - `_. - App Engine Flex does not support instances. For more - information, see `App Engine Standard request routing - `_ and `App Engine Flex request routing - `_. - host: - Output only. The host that the task is sent to. The host is - constructed from the domain name of the app associated with - the queue’s project ID (for example .appspot.com), and the [se - rvice][google.cloud.tasks.v2beta3.AppEngineRouting.service], [ - version][google.cloud.tasks.v2beta3.AppEngineRouting.version], - and [instance][google.cloud.tasks.v2beta3.AppEngineRouting.ins - tance]. Tasks which were created using the App Engine SDK - might have a custom domain name. For more information, see - `How Requests are Routed - `_. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.AppEngineRouting) - }, -) -_sym_db.RegisterMessage(AppEngineRouting) - -OAuthToken = _reflection.GeneratedProtocolMessageType( - "OAuthToken", - (_message.Message,), - { - "DESCRIPTOR": _OAUTHTOKEN, - "__module__": "google.cloud.tasks_v2beta3.proto.target_pb2", - "__doc__": """Contains information needed for generating an `OAuth token - `_. This - type of authorization should generally only be used when calling - Google APIs hosted on \*.googleapis.com. - - Attributes: - service_account_email: - \ `Service account email - `_ to be - used for generating OAuth token. The service account must be - within the same project as the queue. The caller must have - iam.serviceAccounts.actAs permission for the service account. - scope: - OAuth scope to be used for generating OAuth access token. If - not specified, “https://www.googleapis.com/auth/cloud- - platform” will be used. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.OAuthToken) - }, -) -_sym_db.RegisterMessage(OAuthToken) - -OidcToken = _reflection.GeneratedProtocolMessageType( - "OidcToken", - (_message.Message,), - { - "DESCRIPTOR": _OIDCTOKEN, - "__module__": "google.cloud.tasks_v2beta3.proto.target_pb2", - "__doc__": """Contains information needed for generating an `OpenID Connect token - `_. - This type of authorization can be used for many scenarios, including - calling Cloud Run, or endpoints where you intend to validate the token - yourself. - - Attributes: - service_account_email: - \ `Service account email - `_ to be - used for generating OIDC token. The service account must be - within the same project as the queue. The caller must have - iam.serviceAccounts.actAs permission for the service account. - audience: - Audience to be used when generating OIDC token. If not - specified, the URI specified in target will be used. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.OidcToken) - }, -) -_sym_db.RegisterMessage(OidcToken) - - -DESCRIPTOR._options = None -_HTTPREQUEST_HEADERSENTRY._options = None -_APPENGINEHTTPREQUEST_HEADERSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/tasks_v2beta3/proto/target_pb2_grpc.py b/google/cloud/tasks_v2beta3/proto/target_pb2_grpc.py deleted file mode 100644 index 8a939394..00000000 --- a/google/cloud/tasks_v2beta3/proto/target_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/tasks_v2beta3/proto/task_pb2.py b/google/cloud/tasks_v2beta3/proto/task_pb2.py deleted file mode 100644 index 047bf30b..00000000 --- a/google/cloud/tasks_v2beta3/proto/task_pb2.py +++ /dev/null @@ -1,607 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
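The App Engine target messages whose generated docstrings are deleted above (AppEngineHttpRequest, AppEngineRouting, OAuthToken, OidcToken) survive in the migrated library as proto-plus types, importable from google.cloud.tasks_v2beta3.types just as the new async client does further below. A minimal sketch of building a routed App Engine request using the fields those docstrings describe; the service name, URI, header, and body values are placeholders:

# Sketch only: construct the v2beta3 App Engine target types with proto-plus keywords.
from google.cloud.tasks_v2beta3.types import target

# Route the task to a specific service; the host field is output only and set by the API.
routing = target.AppEngineRouting(service="worker")

# http_method defaults to POST, so a request body is allowed here.
app_engine_request = target.AppEngineHttpRequest(
    relative_uri="/enqueue/handler?source=sketch",
    app_engine_routing=routing,
    headers={"Content-Type": "application/json"},  # Content-Length is computed by Cloud Tasks
    body=b'{"payload": "example"}',
)
print(app_engine_request)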
-# source: google/cloud/tasks_v2beta3/proto/task.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.tasks_v2beta3.proto import ( - target_pb2 as google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_target__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/tasks_v2beta3/proto/task.proto", - package="google.cloud.tasks.v2beta3", - syntax="proto3", - serialized_options=b"\n\036com.google.cloud.tasks.v2beta3B\tTaskProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasks", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n+google/cloud/tasks_v2beta3/proto/task.proto\x12\x1agoogle.cloud.tasks.v2beta3\x1a\x19google/api/resource.proto\x1a-google/cloud/tasks_v2beta3/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xcd\x05\n\x04Task\x12\x0c\n\x04name\x18\x01 \x01(\t\x12S\n\x17\x61pp_engine_http_request\x18\x03 \x01(\x0b\x32\x30.google.cloud.tasks.v2beta3.AppEngineHttpRequestH\x00\x12?\n\x0chttp_request\x18\x0b \x01(\x0b\x32\'.google.cloud.tasks.v2beta3.HttpRequestH\x00\x12\x31\n\rschedule_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x11\x64ispatch_deadline\x18\x0c \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x16\n\x0e\x64ispatch_count\x18\x06 \x01(\x05\x12\x16\n\x0eresponse_count\x18\x07 \x01(\x05\x12:\n\rfirst_attempt\x18\x08 \x01(\x0b\x32#.google.cloud.tasks.v2beta3.Attempt\x12\x39\n\x0clast_attempt\x18\t \x01(\x0b\x32#.google.cloud.tasks.v2beta3.Attempt\x12\x33\n\x04view\x18\n \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View"1\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x08\n\x04\x46ULL\x10\x02:h\xea\x41\x65\n\x1e\x63loudtasks.googleapis.com/Task\x12\x43projects/{project}/locations/{location}/queues/{queue}/tasks/{task}B\x0e\n\x0cpayload_type"\xcf\x01\n\x07\x41ttempt\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rdispatch_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rresponse_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x0fresponse_status\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusBn\n\x1e\x63om.google.cloud.tasks.v2beta3B\tTaskProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasksb\x06proto3', - dependencies=[ - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_target__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_TASK_VIEW = _descriptor.EnumDescriptor( - 
name="View", - full_name="google.cloud.tasks.v2beta3.Task.View", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="VIEW_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="BASIC", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FULL", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=816, - serialized_end=865, -) -_sym_db.RegisterEnumDescriptor(_TASK_VIEW) - - -_TASK = _descriptor.Descriptor( - name="Task", - full_name="google.cloud.tasks.v2beta3.Task", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.tasks.v2beta3.Task.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="app_engine_http_request", - full_name="google.cloud.tasks.v2beta3.Task.app_engine_http_request", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="http_request", - full_name="google.cloud.tasks.v2beta3.Task.http_request", - index=2, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="schedule_time", - full_name="google.cloud.tasks.v2beta3.Task.schedule_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.cloud.tasks.v2beta3.Task.create_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dispatch_deadline", - full_name="google.cloud.tasks.v2beta3.Task.dispatch_deadline", - index=5, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dispatch_count", - full_name="google.cloud.tasks.v2beta3.Task.dispatch_count", - index=6, - number=6, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_count", - full_name="google.cloud.tasks.v2beta3.Task.response_count", - index=7, - number=7, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="first_attempt", - full_name="google.cloud.tasks.v2beta3.Task.first_attempt", - index=8, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="last_attempt", - full_name="google.cloud.tasks.v2beta3.Task.last_attempt", - index=9, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="view", - full_name="google.cloud.tasks.v2beta3.Task.view", - index=10, - number=10, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_TASK_VIEW,], - serialized_options=b"\352Ae\n\036cloudtasks.googleapis.com/Task\022Cprojects/{project}/locations/{location}/queues/{queue}/tasks/{task}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="payload_type", - full_name="google.cloud.tasks.v2beta3.Task.payload_type", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=270, - serialized_end=987, -) - - -_ATTEMPT = _descriptor.Descriptor( - name="Attempt", - full_name="google.cloud.tasks.v2beta3.Attempt", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="schedule_time", - full_name="google.cloud.tasks.v2beta3.Attempt.schedule_time", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dispatch_time", - 
full_name="google.cloud.tasks.v2beta3.Attempt.dispatch_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_time", - full_name="google.cloud.tasks.v2beta3.Attempt.response_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="response_status", - full_name="google.cloud.tasks.v2beta3.Attempt.response_status", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=990, - serialized_end=1197, -) - -_TASK.fields_by_name[ - "app_engine_http_request" -].message_type = ( - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_target__pb2._APPENGINEHTTPREQUEST -) -_TASK.fields_by_name[ - "http_request" -].message_type = ( - google_dot_cloud_dot_tasks__v2beta3_dot_proto_dot_target__pb2._HTTPREQUEST -) -_TASK.fields_by_name[ - "schedule_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TASK.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TASK.fields_by_name[ - "dispatch_deadline" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_TASK.fields_by_name["first_attempt"].message_type = _ATTEMPT -_TASK.fields_by_name["last_attempt"].message_type = _ATTEMPT -_TASK.fields_by_name["view"].enum_type = _TASK_VIEW -_TASK_VIEW.containing_type = _TASK -_TASK.oneofs_by_name["payload_type"].fields.append( - _TASK.fields_by_name["app_engine_http_request"] -) -_TASK.fields_by_name["app_engine_http_request"].containing_oneof = _TASK.oneofs_by_name[ - "payload_type" -] -_TASK.oneofs_by_name["payload_type"].fields.append(_TASK.fields_by_name["http_request"]) -_TASK.fields_by_name["http_request"].containing_oneof = _TASK.oneofs_by_name[ - "payload_type" -] -_ATTEMPT.fields_by_name[ - "schedule_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ATTEMPT.fields_by_name[ - "dispatch_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ATTEMPT.fields_by_name[ - "response_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ATTEMPT.fields_by_name[ - "response_status" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -DESCRIPTOR.message_types_by_name["Task"] = _TASK -DESCRIPTOR.message_types_by_name["Attempt"] = _ATTEMPT -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Task = _reflection.GeneratedProtocolMessageType( - "Task", - (_message.Message,), - { - "DESCRIPTOR": _TASK, - "__module__": "google.cloud.tasks_v2beta3.proto.task_pb2", - "__doc__": """A unit of scheduled work. 
- - Attributes: - name: - Optionally caller-specified in [CreateTask][google.cloud.tasks - .v2beta3.CloudTasks.CreateTask]. The task name. The task - name must have the following format: ``projects/PROJECT_ID/loc - ations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers - ([0-9]), hyphens (-), colons (:), or periods (.). For more - information, see `Identifying projects - `_ - ``LOCATION_ID`` - is the canonical ID for the task’s location. The list of - available locations can be obtained by calling [ListLocatio - ns][google.cloud.location.Locations.ListLocations]. For - more information, see - https://cloud.google.com/about/locations/. - ``QUEUE_ID`` can - contain letters ([A-Za-z]), numbers ([0-9]), or hyphens - (-). The maximum length is 100 characters. - ``TASK_ID`` can - contain only letters ([A-Za-z]), numbers ([0-9]), hyphens - (-), or underscores (_). The maximum length is 500 - characters. - payload_type: - Required. The message to send to the worker. - app_engine_http_request: - HTTP request that is sent to the App Engine app handler. An - App Engine task is a task that has [AppEngineHttpRequest][goog - le.cloud.tasks.v2beta3.AppEngineHttpRequest] set. - http_request: - HTTP request that is sent to the task’s target. An HTTP task - is a task that has - [HttpRequest][google.cloud.tasks.v2beta3.HttpRequest] set. - schedule_time: - The time when the task is scheduled to be attempted. For App - Engine queues, this is when the task will be attempted or - retried. ``schedule_time`` will be truncated to the nearest - microsecond. - create_time: - Output only. The time that the task was created. - ``create_time`` will be truncated to the nearest second. - dispatch_deadline: - The deadline for requests sent to the worker. If the worker - does not respond by this deadline then the request is - cancelled and the attempt is marked as a ``DEADLINE_EXCEEDED`` - failure. Cloud Tasks will retry the task according to the - [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. Note - that when the request is cancelled, Cloud Tasks will stop - listening for the response, but whether the worker stops - processing depends on the worker. For example, if the worker - is stuck, it may not react to cancelled requests. The default - and maximum values depend on the type of request: - For - [HTTP tasks][google.cloud.tasks.v2beta3.HttpRequest], the - default is 10 minutes. The deadline must be in the interval - [15 seconds, 30 minutes]. - For [App Engine - tasks][google.cloud.tasks.v2beta3.AppEngineHttpRequest], 0 - indicates that the request has the default deadline. The - default deadline depends on the `scaling type - `_ of the service: - 10 minutes for standard apps with automatic scaling, 24 - hours for standard apps with manual and basic scaling, and 60 - minutes for flex apps. If the request deadline is set, it must - be in the interval [15 seconds, 24 hours 15 seconds]. - Regardless of the task’s ``dispatch_deadline``, the app - handler will not run for longer than than the service’s - timeout. We recommend setting the ``dispatch_deadline`` to - at most a few seconds more than the app handler’s timeout. - For more information see `Timeouts - `_. ``dispatch_deadline`` will be - truncated to the nearest millisecond. The deadline is an - approximate deadline. - dispatch_count: - Output only. The number of attempts dispatched. This count - includes attempts which have been dispatched but haven’t - received a response. - response_count: - Output only. 
The number of attempts which have received a - response. - first_attempt: - Output only. The status of the task’s first attempt. Only [di - spatch_time][google.cloud.tasks.v2beta3.Attempt.dispatch_time] - will be set. The other - [Attempt][google.cloud.tasks.v2beta3.Attempt] information is - not retained by Cloud Tasks. - last_attempt: - Output only. The status of the task’s last attempt. - view: - Output only. The view specifies which subset of the - [Task][google.cloud.tasks.v2beta3.Task] has been returned. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.Task) - }, -) -_sym_db.RegisterMessage(Task) - -Attempt = _reflection.GeneratedProtocolMessageType( - "Attempt", - (_message.Message,), - { - "DESCRIPTOR": _ATTEMPT, - "__module__": "google.cloud.tasks_v2beta3.proto.task_pb2", - "__doc__": """The status of a task attempt. - - Attributes: - schedule_time: - Output only. The time that this attempt was scheduled. - ``schedule_time`` will be truncated to the nearest - microsecond. - dispatch_time: - Output only. The time that this attempt was dispatched. - ``dispatch_time`` will be truncated to the nearest - microsecond. - response_time: - Output only. The time that this attempt response was received. - ``response_time`` will be truncated to the nearest - microsecond. - response_status: - Output only. The response from the worker for this attempt. - If ``response_time`` is unset, then the task has not been - attempted or is currently running and the ``response_status`` - field is meaningless. - """, - # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta3.Attempt) - }, -) -_sym_db.RegisterMessage(Attempt) - - -DESCRIPTOR._options = None -_TASK._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/tasks_v2beta3/proto/task_pb2_grpc.py b/google/cloud/tasks_v2beta3/proto/task_pb2_grpc.py deleted file mode 100644 index 8a939394..00000000 --- a/google/cloud/tasks_v2beta3/proto/task_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/tasks_v2beta3/py.typed b/google/cloud/tasks_v2beta3/py.typed new file mode 100644 index 00000000..41f0b1b8 --- /dev/null +++ b/google/cloud/tasks_v2beta3/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. diff --git a/google/cloud/tasks_v2beta3/services/__init__.py b/google/cloud/tasks_v2beta3/services/__init__.py new file mode 100644 index 00000000..42ffdf2b --- /dev/null +++ b/google/cloud/tasks_v2beta3/services/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
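The Task and Attempt messages documented above keep the same resource-name format in the regenerated surface, and the new clients provide queue_path/task_path helpers for assembling those names (they are re-exported on the async client further down). A hedged sketch of building and creating a task; create_task itself is not shown in this hunk, so its parent/task keyword shape is assumed, and the project, location, queue, and URL values are placeholders:

# Sketch only: build a Task name with the generated path helpers and create an HTTP task.
from google.cloud import tasks_v2beta3
from google.cloud.tasks_v2beta3.types import target, task
from google.protobuf import duration_pb2

client = tasks_v2beta3.CloudTasksClient()

parent = client.queue_path("my-project", "us-central1", "my-queue")
task_name = client.task_path("my-project", "us-central1", "my-queue", "my-task-id")

new_task = task.Task(
    name=task_name,  # optional; Cloud Tasks generates a name when this is omitted
    http_request=target.HttpRequest(url="https://example.com/handler"),
    dispatch_deadline=duration_pb2.Duration(seconds=600),  # 10 minutes, the documented HTTP default
)

created = client.create_task(parent=parent, task=new_task)  # keyword shape assumed
print(created.name)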
+# diff --git a/google/cloud/tasks.py b/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py similarity index 70% rename from google/cloud/tasks.py rename to google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py index 4b5214d5..498f5941 100644 --- a/google/cloud/tasks.py +++ b/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py @@ -1,29 +1,24 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# - -from __future__ import absolute_import - -from google.cloud.tasks_v2 import CloudTasksClient -from google.cloud.tasks_v2 import enums -from google.cloud.tasks_v2 import types - +from .client import CloudTasksClient +from .async_client import CloudTasksAsyncClient __all__ = ( - "enums", - "types", "CloudTasksClient", + "CloudTasksAsyncClient", ) diff --git a/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py b/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py new file mode 100644 index 00000000..d56f5e57 --- /dev/null +++ b/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py @@ -0,0 +1,1731 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
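The rename of google/cloud/tasks.py into services/cloud_tasks/__init__.py above replaces the old enums/types re-exports with two client entry points per API version. A small sketch of both surfaces, assuming application default credentials are available in the environment:

# Sketch only: the synchronous and asyncio clients generated for each API version.
from google.cloud import tasks_v2beta3
from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksAsyncClient

sync_client = tasks_v2beta3.CloudTasksClient()  # blocking calls over the gRPC transport
async_client = CloudTasksAsyncClient()          # same methods, awaitable, grpc_asyncio transport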
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import target +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport +from .client import CloudTasksClient + + +class CloudTasksAsyncClient: + """Cloud Tasks allows developers to manage the execution of + background work in their applications. + """ + + _client: CloudTasksClient + + DEFAULT_ENDPOINT = CloudTasksClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudTasksClient.DEFAULT_MTLS_ENDPOINT + + task_path = staticmethod(CloudTasksClient.task_path) + + queue_path = staticmethod(CloudTasksClient.queue_path) + + from_service_account_file = CloudTasksClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(CloudTasksClient).get_transport_class, type(CloudTasksClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, CloudTasksTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the cloud tasks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. 
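For the client_options behaviour described in the constructor docstring above, a minimal sketch of overriding the endpoint; the endpoint value shown is illustrative, and mutual TLS is left to the GOOGLE_API_USE_MTLS environment variable:

# Sketch only: pass an explicit api_endpoint through client_options.
from google.api_core.client_options import ClientOptions
from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksAsyncClient

options = ClientOptions(api_endpoint="cloudtasks.googleapis.com")
client = CloudTasksAsyncClient(client_options=options)
# GOOGLE_API_USE_MTLS=always switches to the mTLS endpoint unless api_endpoint is set explicitly.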
+ (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = CloudTasksClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_queues( + self, + request: cloudtasks.ListQueuesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesAsyncPager: + r"""Lists queues. + Queues are returned in lexicographical order. + + Args: + request (:class:`~.cloudtasks.ListQueuesRequest`): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListQueuesAsyncPager: + Response message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ListQueuesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_queues, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListQueuesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_queue( + self, + request: cloudtasks.GetQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. 
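A short usage sketch for the list_queues coroutine above; the returned ListQueuesAsyncPager resolves additional pages automatically during async iteration, and the project and location values are placeholders:

# Sketch only: list queues with the async client and iterate the pager.
import asyncio

from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksAsyncClient


async def show_queues() -> None:
    client = CloudTasksAsyncClient()
    parent = "projects/my-project/locations/us-central1"
    pager = await client.list_queues(parent=parent)
    async for q in pager:  # additional pages are fetched lazily
        print(q.name, q.state)


asyncio.run(show_queues())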
+ + Args: + request (:class:`~.cloudtasks.GetQueueRequest`): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. + name (:class:`str`): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.GetQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_queue( + self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.CreateQueueRequest`): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. + parent (:class:`str`): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. 
+ This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (:class:`~.gct_queue.Queue`): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta3.Queue.name] + cannot be the same as an existing queue. + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, queue]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.CreateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_queue( + self, + request: cloudtasks.UpdateQueueRequest = None, + *, + queue: gct_queue.Queue = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.UpdateQueueRequest`): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. + queue (:class:`~.gct_queue.Queue`): + Required. The queue to create or update. + + The queue's + [name][google.cloud.tasks.v2beta3.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2beta3.Queue.name] cannot be + changed. 
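A hedged sketch of create_queue followed by update_queue with a field mask, using the flattened keywords shown above; retry_config.max_attempts is an assumed example field, the names are placeholders, and the queue.yaml warning and 31-day task lifetime from the docstrings still apply:

# Sketch only: create a queue, then update one field through a FieldMask.
from google.cloud import tasks_v2beta3
from google.cloud.tasks_v2beta3.types import queue as gct_queue
from google.protobuf import field_mask_pb2

client = tasks_v2beta3.CloudTasksClient()
parent = "projects/my-project/locations/us-central1"
name = f"{parent}/queues/sketch-queue"

created = client.create_queue(parent=parent, queue=gct_queue.Queue(name=name))

# retry_config.max_attempts is used here as an assumed, illustrative field path.
updated = client.update_queue(
    queue=gct_queue.Queue(name=name, retry_config=gct_queue.RetryConfig(max_attempts=5)),
    update_mask=field_mask_pb2.FieldMask(paths=["retry_config.max_attempts"]),
)
print(updated.retry_config.max_attempts)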
+ This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([queue, update_mask]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.UpdateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("queue.name", request.queue.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_queue( + self, + request: cloudtasks.DeleteQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.DeleteQueueRequest`): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
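The delete_queue signature above follows a convention shared by every method in the regenerated clients: pass either a request object (or dict) or the flattened keyword fields, never both. A sketch of the two call shapes and the ValueError guard; the queue name is a placeholder:

# Sketch only: the request-object form and the keyword form are mutually exclusive.
from google.cloud import tasks_v2beta3
from google.cloud.tasks_v2beta3.types import cloudtasks

client = tasks_v2beta3.CloudTasksClient()
name = "projects/my-project/locations/us-central1/queues/sketch-queue"

# Either shape works on its own:
request = cloudtasks.DeleteQueueRequest(name=name)
client.delete_queue(request=request)
# ...or, equivalently: client.delete_queue(name=name)

# Combining them raises ValueError before any RPC is sent.
try:
    client.delete_queue(request=request, name=name)
except ValueError as exc:
    print(exc)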
+ """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.DeleteQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def purge_queue( + self, + request: cloudtasks.PurgeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Args: + request (:class:`~.cloudtasks.PurgeQueueRequest`): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.PurgeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.purge_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def pause_queue( + self, + request: cloudtasks.PauseQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + Args: + request (:class:`~.cloudtasks.PauseQueueRequest`): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.PauseQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def resume_queue( + self, + request: cloudtasks.ResumeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. 
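Tying the queue-state coroutines above together, a brief sketch with placeholder names; note the docstring warning about resuming many high-QPS queues at once:

# Sketch only: purge, pause, and resume a queue with the async client.
import asyncio

from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksAsyncClient
from google.cloud.tasks_v2beta3.types import queue as gct_queue


async def cycle_queue_state() -> None:
    client = CloudTasksAsyncClient()
    name = "projects/my-project/locations/us-central1/queues/sketch-queue"

    await client.purge_queue(name=name)  # deletes every task in the queue; irreversible
    paused = await client.pause_queue(name=name)
    assert paused.state == gct_queue.Queue.State.PAUSED
    resumed = await client.resume_queue(name=name)
    assert resumed.state == gct_queue.Queue.State.RUNNING


asyncio.run(cycle_queue_state())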
+ + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta3.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Args: + request (:class:`~.cloudtasks.ResumeQueueRequest`): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ResumeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_queue, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. 
+ This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([resource]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + elif not request: + request = iam_policy.GetIamPolicyRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([resource]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + elif not request: + request = iam_policy.SetIamPolicyRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`Sequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ if request is not None and any([resource, permissions]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + elif not request: + request = iam_policy.TestIamPermissionsRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_tasks( + self, + request: cloudtasks.ListTasksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksAsyncPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Args: + request (:class:`~.cloudtasks.ListTasksRequest`): + The request object. Request message for listing tasks + using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTasksAsyncPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = cloudtasks.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tasks, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTasksAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_task( + self, + request: cloudtasks.GetTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + Args: + request (:class:`~.cloudtasks.GetTaskRequest`): + The request object. Request message for getting a task + using + [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def create_task( + self, + request: cloudtasks.CreateTaskRequest = None, + *, + parent: str = None, + task: gct_task.Task = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Args: + request (:class:`~.cloudtasks.CreateTaskRequest`): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`~.gct_task.Task`): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta3.Task.name]. If a name + is not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2beta3.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or executed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or executed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + executed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, task]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = cloudtasks.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_task, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_task( + self, + request: cloudtasks.DeleteTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Args: + request (:class:`~.cloudtasks.DeleteTaskRequest`): + The request object. Request message for deleting a task + using + [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def run_task( + self, + request: cloudtasks.RunTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. 
+ + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Args: + request (:class:`~.cloudtasks.RunTaskRequest`): + The request object. Request message for forcing a task + to run now using + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_task, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-tasks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("CloudTasksAsyncClient",) diff --git a/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py b/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py new file mode 100644 index 00000000..9d9105da --- /dev/null +++ b/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py @@ -0,0 +1,1839 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import os +import re +from typing import Callable, Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import target +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import CloudTasksGrpcTransport +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +class CloudTasksClientMeta(type): + """Metaclass for the CloudTasks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] + _transport_registry["grpc"] = CloudTasksGrpcTransport + _transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[CloudTasksTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class CloudTasksClient(metaclass=CloudTasksClientMeta):
+    """Cloud Tasks allows developers to manage the execution of
+    background work in their applications.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "cloudtasks.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            {@api.name}: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @staticmethod
+    def queue_path(project: str, location: str, queue: str,) -> str:
+        """Return a fully-qualified queue string."""
+        return "projects/{project}/locations/{location}/queues/{queue}".format(
+            project=project, location=location, queue=queue,
+        )
+
+    @staticmethod
+    def parse_queue_path(path: str) -> Dict[str, str]:
+        """Parse a queue path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/queues/(?P<queue>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def task_path(project: str, location: str, queue: str, task: str,) -> str:
+        """Return a fully-qualified task string."""
+        return "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(
+            project=project, location=location, queue=queue, task=task,
+        )
+
+    @staticmethod
+    def parse_task_path(path: str) -> Dict[str, str]:
+        """Parse a task path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/queues/(?P<queue>.+?)/tasks/(?P<task>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: credentials.Credentials = None,
+        transport: Union[str, CloudTasksTransport] = None,
+        client_options: ClientOptions = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the cloud tasks client.
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + if client_options.api_endpoint is None: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + if use_mtls_env == "never": + client_options.api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + has_client_cert_source = ( + client_options.client_cert_source is not None + or mtls.has_default_client_cert_source() + ) + client_options.api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT + if has_client_cert_source + else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudTasksTransport): + # transport is a CloudTasksTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=client_options.api_endpoint, + scopes=client_options.scopes, + api_mtls_endpoint=client_options.api_endpoint, + client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_queues( + self, + request: cloudtasks.ListQueuesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesPager: + r"""Lists queues. + Queues are returned in lexicographical order. + + Args: + request (:class:`~.cloudtasks.ListQueuesRequest`): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListQueuesPager: + Response message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListQueuesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListQueuesRequest): + request = cloudtasks.ListQueuesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_queues] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQueuesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_queue( + self, + request: cloudtasks.GetQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. 
+ + Args: + request (:class:`~.cloudtasks.GetQueueRequest`): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. + name (:class:`str`): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetQueueRequest): + request = cloudtasks.GetQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_queue( + self, + request: cloudtasks.CreateQueueRequest = None, + *, + parent: str = None, + queue: gct_queue.Queue = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.CreateQueueRequest`): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. + parent (:class:`str`): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. 
+ This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (:class:`~.gct_queue.Queue`): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta3.Queue.name] + cannot be the same as an existing queue. + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateQueueRequest): + request = cloudtasks.CreateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_queue( + self, + request: cloudtasks.UpdateQueueRequest = None, + *, + queue: gct_queue.Queue = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.UpdateQueueRequest`): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. + queue (:class:`~.gct_queue.Queue`): + Required. The queue to create or update. + + The queue's + [name][google.cloud.tasks.v2beta3.Queue.name] must be + specified. 
+ + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2beta3.Queue.name] cannot be + changed. + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.UpdateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.UpdateQueueRequest): + request = cloudtasks.UpdateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("queue.name", request.queue.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_queue( + self, + request: cloudtasks.DeleteQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Args: + request (:class:`~.cloudtasks.DeleteQueueRequest`): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. + name (:class:`str`): + Required. The queue name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteQueueRequest): + request = cloudtasks.DeleteQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def purge_queue( + self, + request: cloudtasks.PurgeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Args: + request (:class:`~.cloudtasks.PurgeQueueRequest`): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PurgeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PurgeQueueRequest): + request = cloudtasks.PurgeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def pause_queue( + self, + request: cloudtasks.PauseQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + Args: + request (:class:`~.cloudtasks.PauseQueueRequest`): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PauseQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PauseQueueRequest): + request = cloudtasks.PauseQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def resume_queue( + self, + request: cloudtasks.ResumeQueueRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta3.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Args: + request (:class:`~.cloudtasks.ResumeQueueRequest`): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ResumeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ResumeQueueRequest): + request = cloudtasks.ResumeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_queue] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
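[Editor's note] A hedged sketch of pausing and later resuming the same queue with the two calls above. It assumes the Queue type and its State enum are re-exported at the package level; IDs are placeholders.

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    name = "projects/my-project/locations/us-central1/queues/my-queue"

    paused = client.pause_queue(name=name)
    assert paused.state == tasks_v2beta3.Queue.State.PAUSED

    # Remember the 500/50/5 ramp-up guidance when resuming many
    # high-QPS queues at once.
    resumed = client.resume_queue(name=name)
    assert resumed.state == tasks_v2beta3.Queue.State.RUNNING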
+ has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + elif not request: + request = iam_policy.GetIamPolicyRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + resource: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
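[Editor's note] A small read-only sketch grounding the policy structure described above. The resource name is a placeholder and the caller needs the cloudtasks.queues.getIamPolicy permission.

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    policy = client.get_iam_policy(
        resource="projects/my-project/locations/us-central1/queues/my-queue"
    )
    for binding in policy.bindings:
        print(binding.role, list(binding.members))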
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + elif not request: + request = iam_policy.SetIamPolicyRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. 
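[Editor's note] Because only ``resource`` is flattened for set_iam_policy, the policy itself travels in the request; the dict request form accepted above makes that convenient. A read-modify-write sketch, with a placeholder role and service account.

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    resource = "projects/my-project/locations/us-central1/queues/my-queue"

    policy = client.get_iam_policy(resource=resource)
    policy.bindings.add(
        role="roles/cloudtasks.enqueuer",
        members=["serviceAccount:enqueuer@my-project.iam.gserviceaccount.com"],
    )
    updated = client.set_iam_policy(
        request={"resource": resource, "policy": policy}
    )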
+ This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`Sequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + elif not request: + request = iam_policy.TestIamPermissionsRequest() + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if resource is not None: + request.resource = resource + + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_tasks( + self, + request: cloudtasks.ListTasksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Args: + request (:class:`~.cloudtasks.ListTasksRequest`): + The request object. Request message for listing tasks + using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
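[Editor's note] A quick permission-probe sketch for test_iam_permissions above; the permission names are illustrative and the queue name is a placeholder.

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    response = client.test_iam_permissions(
        resource="projects/my-project/locations/us-central1/queues/my-queue",
        permissions=["cloudtasks.tasks.create", "cloudtasks.tasks.delete"],
    )
    # Only the subset of permissions the caller actually holds is returned.
    print(list(response.permissions))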
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTasksPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListTasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListTasksRequest): + request = cloudtasks.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTasksPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_task( + self, + request: cloudtasks.GetTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + Args: + request (:class:`~.cloudtasks.GetTaskRequest`): + The request object. Request message for getting a task + using + [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetTaskRequest. 
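[Editor's note] Since list_tasks returns a ListTasksPager, plain iteration resolves additional pages transparently. A minimal sketch with placeholder IDs.

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1/queues/my-queue"

    for task in client.list_tasks(parent=parent):
        # Only the BASIC task view is returned by default.
        print(task.name)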
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetTaskRequest): + request = cloudtasks.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_task( + self, + request: cloudtasks.CreateTaskRequest = None, + *, + parent: str = None, + task: gct_task.Task = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Args: + request (:class:`~.cloudtasks.CreateTaskRequest`): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`~.gct_task.Task`): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta3.Task.name]. If a name + is not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2beta3.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or executed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or executed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + executed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. 
The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateTaskRequest): + request = cloudtasks.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_task( + self, + request: cloudtasks.DeleteTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Args: + request (:class:`~.cloudtasks.DeleteTaskRequest`): + The request object. Request message for deleting a task + using + [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
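[Editor's note] Tying the CreateTask documentation above to code, a minimal enqueue sketch using the dict request form the client accepts. It assumes the HttpMethod enum is re-exported at the package level; the handler URL and IDs are placeholders.

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1/queues/my-queue"

    task = {
        "http_request": {
            "http_method": tasks_v2beta3.HttpMethod.POST,
            "url": "https://example.com/task-handler",
            "body": b"payload",
        }
    }
    response = client.create_task(request={"parent": parent, "task": task})
    print("Created task", response.name)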
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteTaskRequest): + request = cloudtasks.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def run_task( + self, + request: cloudtasks.RunTaskRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Args: + request (:class:`~.cloudtasks.RunTaskRequest`): + The request object. Request message for forcing a task + to run now using + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
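[Editor's note] A one-line deletion sketch for delete_task above; the task name is a placeholder. The method returns None and surfaces API errors as google.api_core exceptions.

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    client.delete_task(
        name="projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task"
    )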
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.RunTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.RunTaskRequest): + request = cloudtasks.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-tasks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("CloudTasksClient",) diff --git a/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py b/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py new file mode 100644 index 00000000..1f0f543f --- /dev/null +++ b/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py @@ -0,0 +1,278 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import task + + +class ListQueuesPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`~.cloudtasks.ListQueuesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`~.cloudtasks.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudtasks.ListQueuesResponse], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
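[Editor's note] Before the pager helpers, a short sketch of the run_task call defined above; the task name is a placeholder.

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    task = client.run_task(
        name="projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task"
    )
    # The returned Task reflects its state after dispatch but before the
    # target has responded.
    print(task.dispatch_count)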
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloudtasks.ListQueuesRequest`): + The initial request object. + response (:class:`~.cloudtasks.ListQueuesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[queue.Queue]: + for page in self.pages: + yield from page.queues + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListQueuesAsyncPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`~.cloudtasks.ListQueuesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`~.cloudtasks.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudtasks.ListQueuesResponse]], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloudtasks.ListQueuesRequest`): + The initial request object. + response (:class:`~.cloudtasks.ListQueuesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[queue.Queue]: + async def async_generator(): + async for page in self.pages: + for response in page.queues: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTasksPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`~.cloudtasks.ListTasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tasks`` field. 
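[Editor's note] The explicit ``pages`` property shown above is handy when per-page fields such as next_page_token matter. A sketch, assuming list_queues accepts a flattened ``parent`` argument like the other methods; the parent is a placeholder.

    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    pager = client.list_queues(parent="projects/my-project/locations/us-central1")

    for page in pager.pages:
        print(len(page.queues), "queues; next token:", page.next_page_token or "<none>")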
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`~.cloudtasks.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudtasks.ListTasksResponse], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloudtasks.ListTasksRequest`): + The initial request object. + response (:class:`~.cloudtasks.ListTasksResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[task.Task]: + for page in self.pages: + yield from page.tasks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTasksAsyncPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`~.cloudtasks.ListTasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`~.cloudtasks.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudtasks.ListTasksResponse]], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.cloudtasks.ListTasksRequest`): + The initial request object. + response (:class:`~.cloudtasks.ListTasksResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[task.Task]: + async def async_generator(): + async for page in self.pages: + for response in page.tasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py new file mode 100644 index 00000000..72f33c1b --- /dev/null +++ b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudTasksTransport +from .grpc import CloudTasksGrpcTransport +from .grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] +_transport_registry["grpc"] = CloudTasksGrpcTransport +_transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport + + +__all__ = ( + "CloudTasksTransport", + "CloudTasksGrpcTransport", + "CloudTasksGrpcAsyncIOTransport", +) diff --git a/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py new file mode 100644 index 00000000..a7c0cfe4 --- /dev/null +++ b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py @@ -0,0 +1,397 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import abc +import typing +import pkg_resources + +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore + +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-tasks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class CloudTasksTransport(abc.ABC): + """Abstract transport class for CloudTasks.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages(client_info) + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.list_queues: gapic_v1.method.wrap_method( + self.list_queues, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.get_queue: gapic_v1.method.wrap_method( + self.get_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.create_queue: gapic_v1.method.wrap_method( + self.create_queue, default_timeout=10.0, client_info=client_info, + ), + self.update_queue: gapic_v1.method.wrap_method( + self.update_queue, default_timeout=10.0, client_info=client_info, + ), + self.delete_queue: gapic_v1.method.wrap_method( + self.delete_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.purge_queue: gapic_v1.method.wrap_method( + self.purge_queue, default_timeout=10.0, client_info=client_info, + ), + self.pause_queue: gapic_v1.method.wrap_method( + self.pause_queue, default_timeout=10.0, client_info=client_info, + ), + self.resume_queue: gapic_v1.method.wrap_method( + self.resume_queue, default_timeout=10.0, client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, default_timeout=10.0, client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.list_tasks: gapic_v1.method.wrap_method( + self.list_tasks, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.get_task: gapic_v1.method.wrap_method( + self.get_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.create_task: gapic_v1.method.wrap_method( + self.create_task, default_timeout=10.0, client_info=client_info, + ), + self.delete_task: gapic_v1.method.wrap_method( + self.delete_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=10.0, + client_info=client_info, + ), + self.run_task: gapic_v1.method.wrap_method( + self.run_task, default_timeout=10.0, client_info=client_info, + ), + } + + 
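[Editor's note] These wrapped defaults (retry on UNAVAILABLE and DEADLINE_EXCEEDED, 10-second timeouts) can be overridden per call through the ``retry`` and ``timeout`` parameters every client method exposes. A sketch with a placeholder parent.

    from google.api_core import exceptions, retry as retries
    from google.cloud import tasks_v2beta3

    client = tasks_v2beta3.CloudTasksClient()
    tasks = client.list_tasks(
        parent="projects/my-project/locations/us-central1/queues/my-queue",
        retry=retries.Retry(
            initial=0.25,
            maximum=30.0,
            multiplier=2.0,
            predicate=retries.if_exception_type(exceptions.ServiceUnavailable),
        ),
        timeout=20.0,
    )
    for task in tasks:
        print(task.name)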
@property + def list_queues( + self, + ) -> typing.Callable[ + [cloudtasks.ListQueuesRequest], + typing.Union[ + cloudtasks.ListQueuesResponse, + typing.Awaitable[cloudtasks.ListQueuesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_queue( + self, + ) -> typing.Callable[ + [cloudtasks.GetQueueRequest], + typing.Union[queue.Queue, typing.Awaitable[queue.Queue]], + ]: + raise NotImplementedError() + + @property + def create_queue( + self, + ) -> typing.Callable[ + [cloudtasks.CreateQueueRequest], + typing.Union[gct_queue.Queue, typing.Awaitable[gct_queue.Queue]], + ]: + raise NotImplementedError() + + @property + def update_queue( + self, + ) -> typing.Callable[ + [cloudtasks.UpdateQueueRequest], + typing.Union[gct_queue.Queue, typing.Awaitable[gct_queue.Queue]], + ]: + raise NotImplementedError() + + @property + def delete_queue( + self, + ) -> typing.Callable[ + [cloudtasks.DeleteQueueRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def purge_queue( + self, + ) -> typing.Callable[ + [cloudtasks.PurgeQueueRequest], + typing.Union[queue.Queue, typing.Awaitable[queue.Queue]], + ]: + raise NotImplementedError() + + @property + def pause_queue( + self, + ) -> typing.Callable[ + [cloudtasks.PauseQueueRequest], + typing.Union[queue.Queue, typing.Awaitable[queue.Queue]], + ]: + raise NotImplementedError() + + @property + def resume_queue( + self, + ) -> typing.Callable[ + [cloudtasks.ResumeQueueRequest], + typing.Union[queue.Queue, typing.Awaitable[queue.Queue]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.GetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.SetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> typing.Callable[ + [iam_policy.TestIamPermissionsRequest], + typing.Union[ + iam_policy.TestIamPermissionsResponse, + typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_tasks( + self, + ) -> typing.Callable[ + [cloudtasks.ListTasksRequest], + typing.Union[ + cloudtasks.ListTasksResponse, typing.Awaitable[cloudtasks.ListTasksResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_task( + self, + ) -> typing.Callable[ + [cloudtasks.GetTaskRequest], + typing.Union[task.Task, typing.Awaitable[task.Task]], + ]: + raise NotImplementedError() + + @property + def create_task( + self, + ) -> typing.Callable[ + [cloudtasks.CreateTaskRequest], + typing.Union[gct_task.Task, typing.Awaitable[gct_task.Task]], + ]: + raise NotImplementedError() + + @property + def delete_task( + self, + ) -> typing.Callable[ + [cloudtasks.DeleteTaskRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def run_task( + self, + ) -> typing.Callable[ + [cloudtasks.RunTaskRequest], + typing.Union[task.Task, typing.Awaitable[task.Task]], + ]: + raise NotImplementedError() + + +__all__ = ("CloudTasksTransport",) diff --git a/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py new file mode 100644 index 00000000..f026573f --- /dev/null +++ 
b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py @@ -0,0 +1,756 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + + +import grpc # type: ignore + +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO + + +class CloudTasksGrpcTransport(CloudTasksTransport): + """gRPC backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. 
If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} # type: Dict[str, Callable] + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + @classmethod + def create_channel( + cls, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + address (Optionsl[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def list_queues( + self, + ) -> Callable[[cloudtasks.ListQueuesRequest], cloudtasks.ListQueuesResponse]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + ~.ListQueuesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_queues" not in self._stubs: + self._stubs["list_queues"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/ListQueues", + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs["list_queues"] + + @property + def get_queue(self) -> Callable[[cloudtasks.GetQueueRequest], queue.Queue]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_queue" not in self._stubs: + self._stubs["get_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/GetQueue", + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["get_queue"] + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], gct_queue.Queue]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_queue" not in self._stubs: + self._stubs["create_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/CreateQueue", + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["create_queue"] + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], gct_queue.Queue]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_queue" not in self._stubs: + self._stubs["update_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/UpdateQueue", + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["update_queue"] + + @property + def delete_queue(self) -> Callable[[cloudtasks.DeleteQueueRequest], empty.Empty]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_queue" not in self._stubs: + self._stubs["delete_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/DeleteQueue", + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_queue"] + + @property + def purge_queue(self) -> Callable[[cloudtasks.PurgeQueueRequest], queue.Queue]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. 
+ + Returns: + Callable[[~.PurgeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_queue" not in self._stubs: + self._stubs["purge_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/PurgeQueue", + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["purge_queue"] + + @property + def pause_queue(self) -> Callable[[cloudtasks.PauseQueueRequest], queue.Queue]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_queue" not in self._stubs: + self._stubs["pause_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/PauseQueue", + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["pause_queue"] + + @property + def resume_queue(self) -> Callable[[cloudtasks.ResumeQueueRequest], queue.Queue]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta3.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_queue" not in self._stubs: + self._stubs["resume_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/ResumeQueue", + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["resume_queue"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. 
Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def list_tasks( + self, + ) -> Callable[[cloudtasks.ListTasksRequest], cloudtasks.ListTasksResponse]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + ~.ListTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tasks" not in self._stubs: + self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/ListTasks", + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs["list_tasks"] + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], task.Task]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_task" not in self._stubs: + self._stubs["get_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/GetTask", + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["get_task"] + + @property + def create_task(self) -> Callable[[cloudtasks.CreateTaskRequest], gct_task.Task]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Returns: + Callable[[~.CreateTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_task" not in self._stubs: + self._stubs["create_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/CreateTask", + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs["create_task"] + + @property + def delete_task(self) -> Callable[[cloudtasks.DeleteTaskRequest], empty.Empty]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. 
+ A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_task" not in self._stubs: + self._stubs["delete_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/DeleteTask", + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_task"] + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], task.Task]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Returns: + Callable[[~.RunTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_task" not in self._stubs: + self._stubs["run_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/RunTask", + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["run_task"] + + +__all__ = ("CloudTasksGrpcTransport",) diff --git a/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py new file mode 100644 index 00000000..8de89fc3 --- /dev/null +++ b/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py @@ -0,0 +1,768 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
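For context on the ``CloudTasksGrpcTransport`` constructor documented above, a minimal usage sketch follows; it assumes the generated sync client accepts a ``transport`` argument (conventional for microgen clients) and uses an illustrative local endpoint, neither of which is taken verbatim from this patch.

# Hypothetical sketch (not part of the generated sources): build the v2beta3
# gRPC transport around an explicitly provided channel. Per the constructor
# above, any credentials are ignored when ``channel`` is passed.
import grpc

from google.cloud.tasks_v2beta3 import CloudTasksClient  # assumed re-export
from google.cloud.tasks_v2beta3.services.cloud_tasks.transports.grpc import (
    CloudTasksGrpcTransport,
)

channel = grpc.insecure_channel("localhost:8123")  # assumed local/emulator endpoint
transport = CloudTasksGrpcTransport(channel=channel)

client = CloudTasksClient(transport=transport)  # assumption: client takes ``transport``
queue = client.get_queue(
    name="projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID"
)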
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO +from .grpc import CloudTasksGrpcTransport + + +class CloudTasksGrpcAsyncIOTransport(CloudTasksTransport): + """gRPC AsyncIO backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. 
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def list_queues( + self, + ) -> Callable[ + [cloudtasks.ListQueuesRequest], Awaitable[cloudtasks.ListQueuesResponse] + ]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + Awaitable[~.ListQueuesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_queues" not in self._stubs: + self._stubs["list_queues"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/ListQueues", + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs["list_queues"] + + @property + def get_queue( + self, + ) -> Callable[[cloudtasks.GetQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_queue" not in self._stubs: + self._stubs["get_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/GetQueue", + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["get_queue"] + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], Awaitable[gct_queue.Queue]]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_queue" not in self._stubs: + self._stubs["create_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/CreateQueue", + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["create_queue"] + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], Awaitable[gct_queue.Queue]]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_queue" not in self._stubs: + self._stubs["update_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/UpdateQueue", + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["update_queue"] + + @property + def delete_queue( + self, + ) -> Callable[[cloudtasks.DeleteQueueRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_queue" not in self._stubs: + self._stubs["delete_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/DeleteQueue", + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_queue"] + + @property + def purge_queue( + self, + ) -> Callable[[cloudtasks.PurgeQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + All tasks created before this method is called are + permanently deleted. + Purge operations can take up to one minute to take + effect. 
Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_queue" not in self._stubs: + self._stubs["purge_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/PurgeQueue", + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["purge_queue"] + + @property + def pause_queue( + self, + ) -> Callable[[cloudtasks.PauseQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_queue" not in self._stubs: + self._stubs["pause_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/PauseQueue", + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["pause_queue"] + + @property + def resume_queue( + self, + ) -> Callable[[cloudtasks.ResumeQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta3.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "resume_queue" not in self._stubs: + self._stubs["resume_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/ResumeQueue", + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["resume_queue"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], + Awaitable[iam_policy.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. 
+ + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def list_tasks( + self, + ) -> Callable[ + [cloudtasks.ListTasksRequest], Awaitable[cloudtasks.ListTasksResponse] + ]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + Awaitable[~.ListTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tasks" not in self._stubs: + self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/ListTasks", + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs["list_tasks"] + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], Awaitable[task.Task]]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_task" not in self._stubs: + self._stubs["get_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/GetTask", + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["get_task"] + + @property + def create_task( + self, + ) -> Callable[[cloudtasks.CreateTaskRequest], Awaitable[gct_task.Task]]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Returns: + Callable[[~.CreateTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_task" not in self._stubs: + self._stubs["create_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/CreateTask", + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs["create_task"] + + @property + def delete_task( + self, + ) -> Callable[[cloudtasks.DeleteTaskRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_task" not in self._stubs: + self._stubs["delete_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/DeleteTask", + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_task"] + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], Awaitable[task.Task]]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Returns: + Callable[[~.RunTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "run_task" not in self._stubs: + self._stubs["run_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/RunTask", + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["run_task"] + + +__all__ = ("CloudTasksGrpcAsyncIOTransport",) diff --git a/google/cloud/tasks_v2beta3/types.py b/google/cloud/tasks_v2beta3/types.py deleted file mode 100644 index 6c3a9c40..00000000 --- a/google/cloud/tasks_v2beta3/types.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.cloud.tasks_v2beta3.proto import cloudtasks_pb2 -from google.cloud.tasks_v2beta3.proto import queue_pb2 -from google.cloud.tasks_v2beta3.proto import target_pb2 -from google.cloud.tasks_v2beta3.proto import task_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import options_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import any_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 -from google.rpc import status_pb2 -from google.type import expr_pb2 - - -_shared_modules = [ - iam_policy_pb2, - options_pb2, - policy_pb2, - any_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, - expr_pb2, -] - -_local_modules = [ - cloudtasks_pb2, - queue_pb2, - target_pb2, - task_pb2, -] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.tasks_v2beta3.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - -__all__ = tuple(sorted(names)) diff --git a/google/cloud/tasks_v2beta3/types/__init__.py b/google/cloud/tasks_v2beta3/types/__init__.py new file mode 100644 index 00000000..012ce254 --- /dev/null +++ b/google/cloud/tasks_v2beta3/types/__init__.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
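Before the new ``types`` package that follows, a brief sketch of how the asyncio transport above is typically consumed; the ``CloudTasksAsyncClient`` name and the resource path are assumptions in line with microgen conventions, not taken from this patch.

# Hypothetical sketch (not part of the generated sources).
import asyncio

from google.cloud import tasks_v2beta3  # assumes the generated clients are re-exported here


async def pause_and_resume(queue_name: str) -> None:
    # By default the async client routes calls through the
    # CloudTasksGrpcAsyncIOTransport defined above.
    client = tasks_v2beta3.CloudTasksAsyncClient()  # assumed export name
    await client.pause_queue(name=queue_name)
    # Mind the 500/50/5 guidance from the ResumeQueue docstring when resuming
    # many high-QPS queues.
    await client.resume_queue(name=queue_name)


asyncio.run(
    pause_and_resume("projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID")
)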
+# + +from .target import ( + HttpRequest, + AppEngineHttpQueue, + AppEngineHttpRequest, + AppEngineRouting, + OAuthToken, + OidcToken, +) +from .queue import ( + Queue, + RateLimits, + RetryConfig, + StackdriverLoggingConfig, +) +from .task import ( + Task, + Attempt, +) +from .cloudtasks import ( + ListQueuesRequest, + ListQueuesResponse, + GetQueueRequest, + CreateQueueRequest, + UpdateQueueRequest, + DeleteQueueRequest, + PurgeQueueRequest, + PauseQueueRequest, + ResumeQueueRequest, + ListTasksRequest, + ListTasksResponse, + GetTaskRequest, + CreateTaskRequest, + DeleteTaskRequest, + RunTaskRequest, +) + + +__all__ = ( + "HttpRequest", + "AppEngineHttpQueue", + "AppEngineHttpRequest", + "AppEngineRouting", + "OAuthToken", + "OidcToken", + "Queue", + "RateLimits", + "RetryConfig", + "StackdriverLoggingConfig", + "Task", + "Attempt", + "ListQueuesRequest", + "ListQueuesResponse", + "GetQueueRequest", + "CreateQueueRequest", + "UpdateQueueRequest", + "DeleteQueueRequest", + "PurgeQueueRequest", + "PauseQueueRequest", + "ResumeQueueRequest", + "ListTasksRequest", + "ListTasksResponse", + "GetTaskRequest", + "CreateTaskRequest", + "DeleteTaskRequest", + "RunTaskRequest", +) diff --git a/google/cloud/tasks_v2beta3/types/cloudtasks.py b/google/cloud/tasks_v2beta3/types/cloudtasks.py new file mode 100644 index 00000000..e469298c --- /dev/null +++ b/google/cloud/tasks_v2beta3/types/cloudtasks.py @@ -0,0 +1,479 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.protobuf import field_mask_pb2 as field_mask # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta3", + manifest={ + "ListQueuesRequest", + "ListQueuesResponse", + "GetQueueRequest", + "CreateQueueRequest", + "UpdateQueueRequest", + "DeleteQueueRequest", + "PurgeQueueRequest", + "PauseQueueRequest", + "ResumeQueueRequest", + "ListTasksRequest", + "ListTasksResponse", + "GetTaskRequest", + "CreateTaskRequest", + "DeleteTaskRequest", + "RunTaskRequest", + }, +) + + +class ListQueuesRequest(proto.Message): + r"""Request message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + + Attributes: + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + filter (str): + ``filter`` can be used to specify a subset of queues. Any + [Queue][google.cloud.tasks.v2beta3.Queue] field can be used + as a filter and several operators as supported. For example: + ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as + described in `Stackdriver's Advanced Logs + Filters `__. + + Sample filter "state: PAUSED". + + Note that using filters might cause fewer queues than the + requested page_size to be returned. + page_size (int): + Requested page size. + + The maximum page size is 9800. 
If unspecified, the page size + will be the maximum. Fewer queues than requested might be + returned, even if more queues exist; use the + [next_page_token][google.cloud.tasks.v2beta3.ListQueuesResponse.next_page_token] + in the response to determine if more queues exist. + page_token (str): + A token identifying the page of results to return. + + To request the first page results, page_token must be empty. + To request the next page of results, page_token must be the + value of + [next_page_token][google.cloud.tasks.v2beta3.ListQueuesResponse.next_page_token] + returned from the previous call to + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues] + method. It is an error to switch the value of the + [filter][google.cloud.tasks.v2beta3.ListQueuesRequest.filter] + while iterating through pages. + """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListQueuesResponse(proto.Message): + r"""Response message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + + Attributes: + queues (Sequence[~.gct_queue.Queue]): + The list of queues. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues] + with this value as the + [page_token][google.cloud.tasks.v2beta3.ListQueuesRequest.page_token]. + + If the next_page_token is empty, there are no more results. + + The page token is valid for only 2 hours. + """ + + @property + def raw_page(self): + return self + + queues = proto.RepeatedField(proto.MESSAGE, number=1, message=gct_queue.Queue,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetQueueRequest(proto.Message): + r"""Request message for + [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. + + Attributes: + name (str): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreateQueueRequest(proto.Message): + r"""Request message for + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. + + Attributes: + parent (str): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + queue (~.gct_queue.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta3.Queue.name] cannot + be the same as an existing queue. + """ + + parent = proto.Field(proto.STRING, number=1) + + queue = proto.Field(proto.MESSAGE, number=2, message=gct_queue.Queue,) + + +class UpdateQueueRequest(proto.Message): + r"""Request message for + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. + + Attributes: + queue (~.gct_queue.Queue): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2beta3.Queue.name] + must be specified. + + Output only fields cannot be modified using UpdateQueue. Any + value specified for an output only field will be ignored. + The queue's [name][google.cloud.tasks.v2beta3.Queue.name] + cannot be changed. + update_mask (~.field_mask.FieldMask): + A mask used to specify which fields of the + queue are being updated. 
+ If empty, then all fields will be updated. + """ + + queue = proto.Field(proto.MESSAGE, number=1, message=gct_queue.Queue,) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class DeleteQueueRequest(proto.Message): + r"""Request message for + [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class PurgeQueueRequest(proto.Message): + r"""Request message for + [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class PauseQueueRequest(proto.Message): + r"""Request message for + [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class ResumeQueueRequest(proto.Message): + r"""Request message for + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListTasksRequest(proto.Message): + r"""Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta3.Task] resource. + page_size (int): + Maximum page size. + + Fewer tasks than requested might be returned, even if more + tasks exist; use + [next_page_token][google.cloud.tasks.v2beta3.ListTasksResponse.next_page_token] + in the response to determine if more tasks exist. + + The maximum page size is 1000. If unspecified, the page size + will be the maximum. + page_token (str): + A token identifying the page of results to return. + + To request the first page results, page_token must be empty. + To request the next page of results, page_token must be the + value of + [next_page_token][google.cloud.tasks.v2beta3.ListTasksResponse.next_page_token] + returned from the previous call to + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] + method. + + The page token is valid for only 2 hours. 
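A sketch (assumed names, not part of the diff) of how the pagers in services/cloud_tasks/pagers.py consume these page tokens: callers simply iterate results and the pager follows next_page_token transparently.

from google.cloud import tasks_v2beta3

client = tasks_v2beta3.CloudTasksClient()
parent = client.queue_path("my-project", "us-central1", "my-queue")

# The pager yields Task messages and fetches further pages on demand.
for task in client.list_tasks(request={"parent": parent, "page_size": 100}):
    print(task.name)

# Raw pages (ListTasksResponse messages) remain reachable when needed.
for page in client.list_tasks(request={"parent": parent}).pages:
    print(page.next_page_token)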
+ """ + + parent = proto.Field(proto.STRING, number=1) + + response_view = proto.Field(proto.ENUM, number=2, enum=gct_task.Task.View,) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListTasksResponse(proto.Message): + r"""Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + + Attributes: + tasks (Sequence[~.gct_task.Task]): + The list of tasks. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] + with this value as the + [page_token][google.cloud.tasks.v2beta3.ListTasksRequest.page_token]. + + If the next_page_token is empty, there are no more results. + """ + + @property + def raw_page(self): + return self + + tasks = proto.RepeatedField(proto.MESSAGE, number=1, message=gct_task.Task,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetTaskRequest(proto.Message): + r"""Request message for getting a task using + [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta3.Task] resource. + """ + + name = proto.Field(proto.STRING, number=1) + + response_view = proto.Field(proto.ENUM, number=2, enum=gct_task.Task.View,) + + +class CreateTaskRequest(proto.Message): + r"""Request message for + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + task (~.gct_task.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta3.Task.name]. If a name is + not specified then the system will generate a random unique + task id, which will be set in the task returned in the + [response][google.cloud.tasks.v2beta3.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set it to + the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task de-duplication. + If a task's ID is identical to that of an existing task or a + task that was deleted or executed recently then the call + will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour after + the original task was deleted or executed. 
If the task's + queue was created using queue.yaml or queue.xml, then + another task with the same name can't be created for ~9days + after the original task was deleted or executed. + + Because there is an extra lookup cost to identify duplicate + task names, these + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id is + recommended. Choosing task ids that are sequential or have + sequential prefixes, for example using a timestamp, causes + an increase in latency and error rates in all task commands. + The infrastructure relies on an approximately uniform + distribution of task ids to store and serve tasks + efficiently. + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta3.Task] resource. + """ + + parent = proto.Field(proto.STRING, number=1) + + task = proto.Field(proto.MESSAGE, number=2, message=gct_task.Task,) + + response_view = proto.Field(proto.ENUM, number=3, enum=gct_task.Task.View,) + + +class DeleteTaskRequest(proto.Message): + r"""Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class RunTaskRequest(proto.Message): + r"""Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (~.gct_task.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta3.Task] resource. + """ + + name = proto.Field(proto.STRING, number=1) + + response_view = proto.Field(proto.ENUM, number=2, enum=gct_task.Task.View,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/tasks_v2beta3/types/queue.py b/google/cloud/tasks_v2beta3/types/queue.py new file mode 100644 index 00000000..1c9cc8a2 --- /dev/null +++ b/google/cloud/tasks_v2beta3/types/queue.py @@ -0,0 +1,406 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.tasks_v2beta3.types import target +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta3", + manifest={"Queue", "RateLimits", "RetryConfig", "StackdriverLoggingConfig",}, +) + + +class Queue(proto.Message): + r"""A queue is a container of related tasks. Queues are + configured to manage how those tasks are dispatched. + Configurable properties include rate limits, retry options, + queue types, and others. + + Attributes: + name (str): + Caller-specified and required in + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue], + after which it becomes output only. + + The queue name. + + The queue name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the queue's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + app_engine_http_queue (~.target.AppEngineHttpQueue): + [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] + settings apply only to [App Engine + tasks][google.cloud.tasks.v2beta3.AppEngineHttpRequest] in + this queue. [Http + tasks][google.cloud.tasks.v2beta3.HttpRequest] are not + affected by this proto. + rate_limits (~.queue.RateLimits): + Rate limits for task dispatches. + + [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits] + and + [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] + are related because they both control task attempts. However + they control task attempts in different ways: + + - [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits] + controls the total rate of dispatches from a queue (i.e. + all traffic dispatched from the queue, regardless of + whether the dispatch is from a first attempt or a retry). + - [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] + controls what happens to particular a task after its + first attempt fails. That is, + [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] + controls task retries (the second attempt, third attempt, + etc). + + The queue's actual dispatch rate is the result of: + + - Number of tasks in the queue + - User-specified throttling: + [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits], + [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config], + and the [queue's + state][google.cloud.tasks.v2beta3.Queue.state]. 
+ - System throttling due to ``429`` (Too Many Requests) or + ``503`` (Service Unavailable) responses from the worker, + high error rates, or to smooth sudden large traffic + spikes. + retry_config (~.queue.RetryConfig): + Settings that determine the retry behavior. + + - For tasks created using Cloud Tasks: the queue-level + retry settings apply to all tasks in the queue that were + created using Cloud Tasks. Retry settings cannot be set + on individual tasks. + - For tasks created using the App Engine SDK: the + queue-level retry settings apply to all tasks in the + queue which do not have retry settings explicitly set on + the task and were created by the App Engine SDK. See `App + Engine + documentation `__. + state (~.queue.Queue.State): + Output only. The state of the queue. + + ``state`` can only be changed by called + [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue], + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue], + or uploading + `queue.yaml/xml `__. + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] + cannot be used to change ``state``. + purge_time (~.timestamp.Timestamp): + Output only. The last time this queue was purged. + + All tasks that were + [created][google.cloud.tasks.v2beta3.Task.create_time] + before this time were purged. + + A queue can be purged using + [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue], + the `App Engine Task Queue SDK, or the Cloud + Console `__. + + Purge time will be truncated to the nearest microsecond. + Purge time will be unset if the queue has never been purged. + stackdriver_logging_config (~.queue.StackdriverLoggingConfig): + Configuration options for writing logs to `Stackdriver + Logging `__. If this + field is unset, then no logs are written. + type (~.queue.Queue.Type): + Immutable. The type of a queue (push or pull). + + ``Queue.type`` is an immutable property of the queue that is + set at the queue creation time. When left unspecified, the + default value of ``PUSH`` is selected. + """ + + class State(proto.Enum): + r"""State of the queue.""" + STATE_UNSPECIFIED = 0 + RUNNING = 1 + PAUSED = 2 + DISABLED = 3 + + class Type(proto.Enum): + r"""The type of the queue.""" + TYPE_UNSPECIFIED = 0 + PULL = 1 + PUSH = 2 + + name = proto.Field(proto.STRING, number=1) + + app_engine_http_queue = proto.Field( + proto.MESSAGE, number=3, oneof="queue_type", message=target.AppEngineHttpQueue, + ) + + rate_limits = proto.Field(proto.MESSAGE, number=4, message="RateLimits",) + + retry_config = proto.Field(proto.MESSAGE, number=5, message="RetryConfig",) + + state = proto.Field(proto.ENUM, number=6, enum=State,) + + purge_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + + stackdriver_logging_config = proto.Field( + proto.MESSAGE, number=10, message="StackdriverLoggingConfig", + ) + + type = proto.Field(proto.ENUM, number=11, enum=Type,) + + +class RateLimits(proto.Message): + r"""Rate limits. + + This message determines the maximum rate that tasks can be + dispatched by a queue, regardless of whether the dispatch is a first + task attempt or a retry. + + Note: The debugging command, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask], will run a + task even if the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits]. + + Attributes: + max_dispatches_per_second (float): + The maximum rate at which tasks are dispatched from this + queue. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. 
+ + - For [App Engine + queues][google.cloud.tasks.v2beta3.AppEngineHttpQueue], + the maximum allowed value is 500. + + This field has the same meaning as `rate in + queue.yaml/xml `__. + max_burst_size (int): + Output only. The max burst size. + + Max burst size limits how fast tasks in queue are processed + when many tasks are in the queue and the rate is high. This + field allows the queue to have a high rate so processing + starts shortly after a task is enqueued, but still limits + resource usage when many tasks are enqueued in a short + period of time. + + The `token + bucket `__ + algorithm is used to control the rate of task dispatches. + Each queue has a token bucket that holds tokens, up to the + maximum specified by ``max_burst_size``. Each time a task is + dispatched, a token is removed from the bucket. Tasks will + be dispatched until the queue's bucket runs out of tokens. + The bucket will be continuously refilled with new tokens + based on + [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. + + Cloud Tasks will pick the value of ``max_burst_size`` based + on the value of + [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. + + For App Engine queues that were created or updated using + ``queue.yaml/xml``, ``max_burst_size`` is equal to + `bucket_size `__. + Since ``max_burst_size`` is output only, if + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] + is called on a queue created by ``queue.yaml/xml``, + ``max_burst_size`` will be reset based on the value of + [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second], + regardless of whether + [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second] + is updated. + max_concurrent_dispatches (int): + The maximum number of concurrent tasks that Cloud Tasks + allows to be dispatched for this queue. After this threshold + has been reached, Cloud Tasks stops dispatching tasks until + the number of concurrent requests decreases. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + The maximum allowed value is 5,000. + + This field has the same meaning as `max_concurrent_requests + in + queue.yaml/xml `__. + """ + + max_dispatches_per_second = proto.Field(proto.DOUBLE, number=1) + + max_burst_size = proto.Field(proto.INT32, number=2) + + max_concurrent_dispatches = proto.Field(proto.INT32, number=3) + + +class RetryConfig(proto.Message): + r"""Retry config. + These settings determine when a failed task attempt is retried. + + Attributes: + max_attempts (int): + Number of attempts per task. + + Cloud Tasks will attempt the task ``max_attempts`` times + (that is, if the first attempt fails, then there will be + ``max_attempts - 1`` retries). Must be >= -1. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + -1 indicates unlimited attempts. + + This field has the same meaning as `task_retry_limit in + queue.yaml/xml `__. + max_retry_duration (~.duration.Duration): + If positive, ``max_retry_duration`` specifies the time limit + for retrying a failed task, measured from when the task was + first attempted. Once ``max_retry_duration`` time has passed + *and* the task has been attempted + [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] + times, no further attempts will be made and the task will be + deleted. + + If zero, then the task age is unlimited. 
+ + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``max_retry_duration`` will be truncated to the nearest + second. + + This field has the same meaning as `task_age_limit in + queue.yaml/xml `__. + min_backoff (~.duration.Duration): + A task will be + [scheduled][google.cloud.tasks.v2beta3.Task.schedule_time] + for retry between + [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig] + specifies that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``min_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `min_backoff_seconds in + queue.yaml/xml `__. + max_backoff (~.duration.Duration): + A task will be + [scheduled][google.cloud.tasks.v2beta3.Task.schedule_time] + for retry between + [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig] + specifies that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``max_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `max_backoff_seconds in + queue.yaml/xml `__. + max_doublings (int): + The time between retries will double ``max_doublings`` + times. + + A task's retry interval starts at + [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff], + then doubles ``max_doublings`` times, then increases + linearly, and finally retries at intervals of + [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] + up to + [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] + times. + + For example, if + [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] + is 10s, + [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] + is 300s, and ``max_doublings`` is 3, then the a task will + first be retried in 10s. The retry interval will double + three times, and then increase linearly by 2^3 \* 10s. + Finally, the task will retry at intervals of + [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] + until the task has been attempted + [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] + times. Thus, the requests will retry at 10s, 20s, 40s, 80s, + 160s, 240s, 300s, 300s, .... + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + This field has the same meaning as `max_doublings in + queue.yaml/xml `__. + """ + + max_attempts = proto.Field(proto.INT32, number=1) + + max_retry_duration = proto.Field( + proto.MESSAGE, number=2, message=duration.Duration, + ) + + min_backoff = proto.Field(proto.MESSAGE, number=3, message=duration.Duration,) + + max_backoff = proto.Field(proto.MESSAGE, number=4, message=duration.Duration,) + + max_doublings = proto.Field(proto.INT32, number=5) + + +class StackdriverLoggingConfig(proto.Message): + r"""Configuration options for writing logs to `Stackdriver + Logging `__. + + Attributes: + sampling_ratio (float): + Specifies the fraction of operations to write to + `Stackdriver + Logging `__. This + field may contain any value between 0.0 and 1.0, inclusive. 
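Pulling the queue settings above together, a hedged sketch (placeholder names, arbitrary values) of creating a queue with explicit RateLimits, RetryConfig, and StackdriverLoggingConfig:

from google.protobuf import duration_pb2

from google.cloud import tasks_v2beta3

client = tasks_v2beta3.CloudTasksClient()

queue = tasks_v2beta3.Queue(
    name=client.queue_path("my-project", "us-central1", "my-queue"),
    rate_limits=tasks_v2beta3.RateLimits(
        max_dispatches_per_second=10.0,
        max_concurrent_dispatches=50,
    ),
    retry_config=tasks_v2beta3.RetryConfig(
        max_attempts=5,
        max_retry_duration=duration_pb2.Duration(seconds=3600),
        min_backoff=duration_pb2.Duration(seconds=10),
        max_backoff=duration_pb2.Duration(seconds=300),
        max_doublings=3,
    ),
    # Log roughly 10% of operations to Stackdriver Logging.
    stackdriver_logging_config=tasks_v2beta3.StackdriverLoggingConfig(
        sampling_ratio=0.1,
    ),
)

client.create_queue(
    request={"parent": "projects/my-project/locations/us-central1", "queue": queue}
)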
+ 0.0 is the default and means that no operations are logged. + """ + + sampling_ratio = proto.Field(proto.DOUBLE, number=1) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/tasks_v2beta3/types/target.py b/google/cloud/tasks_v2beta3/types/target.py new file mode 100644 index 00000000..69379e33 --- /dev/null +++ b/google/cloud/tasks_v2beta3/types/target.py @@ -0,0 +1,535 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta3", + manifest={ + "HttpMethod", + "HttpRequest", + "AppEngineHttpQueue", + "AppEngineHttpRequest", + "AppEngineRouting", + "OAuthToken", + "OidcToken", + }, +) + + +class HttpMethod(proto.Enum): + r"""The HTTP method used to execute the task.""" + HTTP_METHOD_UNSPECIFIED = 0 + POST = 1 + GET = 2 + HEAD = 3 + PUT = 4 + DELETE = 5 + PATCH = 6 + OPTIONS = 7 + + +class HttpRequest(proto.Message): + r"""HTTP request. + + The task will be pushed to the worker as an HTTP request. If the + worker or the redirected worker acknowledges the task by returning a + successful HTTP response code ([``200`` - ``299``]), the task will + be removed from the queue. If any other HTTP response code is + returned or no response is received, the task will be retried + according to the following: + + - User-specified throttling: [retry + configuration][google.cloud.tasks.v2beta3.Queue.retry_config], + [rate limits][google.cloud.tasks.v2beta3.Queue.rate_limits], and + the [queue's state][google.cloud.tasks.v2beta3.Queue.state]. + + - System throttling: To prevent the worker from overloading, Cloud + Tasks may temporarily reduce the queue's effective rate. + User-specified settings will not be changed. + + System throttling happens because: + + - Cloud Tasks backs off on all errors. Normally the backoff + specified in [rate + limits][google.cloud.tasks.v2beta3.Queue.rate_limits] will be + used. But if the worker returns ``429`` (Too Many Requests), + ``503`` (Service Unavailable), or the rate of errors is high, + Cloud Tasks will use a higher backoff rate. The retry specified + in the ``Retry-After`` HTTP response header is considered. + + - To prevent traffic spikes and to smooth sudden increases in + traffic, dispatches ramp up slowly when the queue is newly + created or idle and if large numbers of tasks suddenly become + available to dispatch (due to spikes in create task rates, the + queue being unpaused, or many tasks that are scheduled at the + same time). + + Attributes: + url (str): + Required. The full url path that the request will be sent + to. + + This string must begin with either "http://" or "https://". + Some examples are: ``http://acme.com`` and + ``https://acme.com/sales:8080``. Cloud Tasks will encode + some characters for safety and compatibility. The maximum + allowed URL length is 2083 characters after encoding. 
+ + The ``Location`` header response from a redirect response + [``300`` - ``399``] may be followed. The redirect is not + counted as a separate attempt. + http_method (~.target.HttpMethod): + The HTTP method to use for the request. The + default is POST. + headers (Sequence[~.target.HttpRequest.HeadersEntry]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + + These headers represent a subset of the headers that will + accompany the task's HTTP request. Some HTTP request headers + will be ignored or replaced. + + A partial list of headers that will be ignored or replaced + is: + + - Host: This will be computed by Cloud Tasks and derived + from + [HttpRequest.url][google.cloud.tasks.v2beta3.HttpRequest.url]. + - Content-Length: This will be computed by Cloud Tasks. + - User-Agent: This will be set to ``"Google-Cloud-Tasks"``. + - X-Google-\*: Google use only. + - X-AppEngine-\*: Google use only. + + ``Content-Type`` won't be set by Cloud Tasks. You can + explicitly set ``Content-Type`` to a media type when the + [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/octet-stream"`` or ``"application/json"``. + + Headers which can have multiple values (according to + RFC2616) can be specified using comma-separated values. + + The size of the headers must be less than 80KB. + body (bytes): + HTTP request body. + + A request body is allowed only if the [HTTP + method][google.cloud.tasks.v2beta3.HttpRequest.http_method] + is POST, PUT, or PATCH. It is an error to set body on a task + with an incompatible + [HttpMethod][google.cloud.tasks.v2beta3.HttpMethod]. + oauth_token (~.target.OAuthToken): + If specified, an `OAuth + token `__ + will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization should generally only be used + when calling Google APIs hosted on \*.googleapis.com. + oidc_token (~.target.OidcToken): + If specified, an + `OIDC `__ + token will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend + to validate the token yourself. + """ + + url = proto.Field(proto.STRING, number=1) + + http_method = proto.Field(proto.ENUM, number=2, enum="HttpMethod",) + + headers = proto.MapField(proto.STRING, proto.STRING, number=3) + + body = proto.Field(proto.BYTES, number=4) + + oauth_token = proto.Field( + proto.MESSAGE, number=5, oneof="authorization_header", message="OAuthToken", + ) + + oidc_token = proto.Field( + proto.MESSAGE, number=6, oneof="authorization_header", message="OidcToken", + ) + + +class AppEngineHttpQueue(proto.Message): + r"""App Engine HTTP queue. + + The task will be delivered to the App Engine application hostname + specified by its + [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] + and + [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest]. + The documentation for + [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] + explains how the task's host URL is constructed. 
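An illustrative sketch (not from the diff; the URL and service account email are placeholders) of an HTTP task that authenticates with an OIDC token. Note that oidc_token and oauth_token share the authorization_header oneof, so set at most one of them.

from google.cloud import tasks_v2beta3

client = tasks_v2beta3.CloudTasksClient()
parent = client.queue_path("my-project", "us-central1", "my-queue")

task = tasks_v2beta3.Task(
    http_request=tasks_v2beta3.HttpRequest(
        url="https://example.com/task_handler",
        # http_method defaults to POST; Content-Type must be set explicitly.
        headers={"Content-Type": "application/json"},
        body=b'{"message": "hello"}',
        oidc_token=tasks_v2beta3.OidcToken(
            service_account_email="tasks-invoker@my-project.iam.gserviceaccount.com",
        ),
    ),
)
client.create_task(request={"parent": parent, "task": task})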
+ + Using + [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + Attributes: + app_engine_routing_override (~.target.AppEngineRouting): + Overrides for the [task-level + app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. + + If set, ``app_engine_routing_override`` is used for all + tasks in the queue, no matter what the setting is for the + [task-level + app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. + """ + + app_engine_routing_override = proto.Field( + proto.MESSAGE, number=1, message="AppEngineRouting", + ) + + +class AppEngineHttpRequest(proto.Message): + r"""App Engine HTTP request. + + The message defines the HTTP request that is sent to an App Engine + app when the task is dispatched. + + Using + [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + The task will be delivered to the App Engine app which belongs to + the same project as the queue. For more information, see `How + Requests are + Routed `__ + and how routing is affected by `dispatch + files `__. + Traffic is encrypted during transport and never leaves Google + datacenters. Because this traffic is carried over a communication + mechanism internal to Google, you cannot explicitly set the protocol + (for example, HTTP or HTTPS). The request to the handler, however, + will appear to have used the HTTP protocol. + + The [AppEngineRouting][google.cloud.tasks.v2beta3.AppEngineRouting] + used to construct the URL that the task is delivered to can be set + at the queue-level or task-level: + + - If set, + [app_engine_routing_override][google.cloud.tasks.v2beta3.AppEngineHttpQueue.app_engine_routing_override] + is used for all tasks in the queue, no matter what the setting is + for the [task-level + app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. + + The ``url`` that the task will be sent to is: + + - ``url =`` + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] ``+`` + [relative_uri][google.cloud.tasks.v2beta3.AppEngineHttpRequest.relative_uri] + + Tasks can be dispatched to secure app handlers, unsecure app + handlers, and URIs restricted with + ```login: admin`` `__. + Because tasks are not run as any user, they cannot be dispatched to + URIs restricted with + ```login: required`` `__ + Task dispatches also do not follow redirects. + + The task attempt has succeeded if the app's request handler returns + an HTTP response code in the range [``200`` - ``299``]. The task + attempt has failed if the app's handler returns a non-2xx response + code or Cloud Tasks does not receive response before the + [deadline][google.cloud.tasks.v2beta3.Task.dispatch_deadline]. + Failed tasks will be retried according to the [retry + configuration][google.cloud.tasks.v2beta3.Queue.retry_config]. + ``503`` (Service Unavailable) is considered an App Engine system + error instead of an application error and will cause Cloud Tasks' + traffic congestion control to temporarily throttle the queue's + dispatches. 
Unlike other types of task targets, a ``429`` (Too Many + Requests) response from an app handler does not cause traffic + congestion control to throttle the queue. + + Attributes: + http_method (~.target.HttpMethod): + The HTTP method to use for the request. The default is POST. + + The app's request handler for the task's target URL must be + able to handle HTTP requests with this http_method, + otherwise the task attempt fails with error code 405 (Method + Not Allowed). See `Writing a push task request + handler `__ + and the App Engine documentation for your runtime on `How + Requests are + Handled `__. + app_engine_routing (~.target.AppEngineRouting): + Task-level setting for App Engine routing. + + If set, + [app_engine_routing_override][google.cloud.tasks.v2beta3.AppEngineHttpQueue.app_engine_routing_override] + is used for all tasks in the queue, no matter what the + setting is for the [task-level + app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. + relative_uri (str): + The relative URI. + The relative URI must begin with "/" and must be + a valid HTTP relative URI. It can contain a path + and query string arguments. If the relative URI + is empty, then the root path "/" will be used. + No spaces are allowed, and the maximum length + allowed is 2083 characters. + headers (Sequence[~.target.AppEngineHttpRequest.HeadersEntry]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + Repeated headers are not supported but a header value can + contain commas. + + Cloud Tasks sets some headers to default values: + + - ``User-Agent``: By default, this header is + ``"AppEngine-Google; (+http://code.google.com/appengine)"``. + This header can be modified, but Cloud Tasks will append + ``"AppEngine-Google; (+http://code.google.com/appengine)"`` + to the modified ``User-Agent``. + + If the task has a + [body][google.cloud.tasks.v2beta3.AppEngineHttpRequest.body], + Cloud Tasks sets the following headers: + + - ``Content-Type``: By default, the ``Content-Type`` header + is set to ``"application/octet-stream"``. The default can + be overridden by explicitly setting ``Content-Type`` to a + particular media type when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/json"``. + - ``Content-Length``: This is computed by Cloud Tasks. This + value is output only. It cannot be changed. + + The headers below cannot be set or overridden: + + - ``Host`` + - ``X-Google-\*`` + - ``X-AppEngine-\*`` + + In addition, Cloud Tasks sets some headers when the task is + dispatched, such as headers containing information about the + task; see `request + headers `__. + These headers are set only when the task is dispatched, so + they are not visible when the task is returned in a Cloud + Tasks response. + + Although there is no specific limit for the maximum number + of headers or the size, there is a limit on the maximum size + of the [Task][google.cloud.tasks.v2beta3.Task]. For more + information, see the + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] + documentation. + body (bytes): + HTTP request body. + + A request body is allowed only if the HTTP method is POST or + PUT. It is an error to set a body on a task with an + incompatible + [HttpMethod][google.cloud.tasks.v2beta3.HttpMethod]. 
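For comparison, a sketch of the App Engine flavour (placeholder service and queue names, assuming the package-level re-exports): the task targets a handler in the queue's own project, and the queue-level app_engine_routing_override, if set, takes precedence over the routing below.

from google.cloud import tasks_v2beta3

client = tasks_v2beta3.CloudTasksClient()
parent = client.queue_path("my-project", "us-central1", "my-queue")

task = tasks_v2beta3.Task(
    app_engine_http_request=tasks_v2beta3.AppEngineHttpRequest(
        # http_method defaults to POST.
        relative_uri="/tasks/handle",
        app_engine_routing=tasks_v2beta3.AppEngineRouting(service="worker"),
        headers={"Content-Type": "application/json"},
        body=b'{"message": "hello"}',
    ),
)
client.create_task(request={"parent": parent, "task": task})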
+ """ + + http_method = proto.Field(proto.ENUM, number=1, enum="HttpMethod",) + + app_engine_routing = proto.Field( + proto.MESSAGE, number=2, message="AppEngineRouting", + ) + + relative_uri = proto.Field(proto.STRING, number=3) + + headers = proto.MapField(proto.STRING, proto.STRING, number=4) + + body = proto.Field(proto.BYTES, number=5) + + +class AppEngineRouting(proto.Message): + r"""App Engine Routing. + + Defines routing characteristics specific to App Engine - service, + version, and instance. + + For more information about services, versions, and instances see `An + Overview of App + Engine `__, + `Microservices Architecture on Google App + Engine `__, + `App Engine Standard request + routing `__, + and `App Engine Flex request + routing `__. + + Attributes: + service (str): + App service. + + By default, the task is sent to the service which is the + default service when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is + not parsable into + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is + not parsable, then + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance] + are the empty string. + version (str): + App version. + + By default, the task is sent to the version which is the + default version when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is + not parsable into + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is + not parsable, then + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance] + are the empty string. + instance (str): + App instance. + + By default, the task is sent to an instance which is + available when the task is attempted. + + Requests can only be sent to a specific instance if `manual + scaling is used in App Engine + Standard `__. + App Engine Flex does not support instances. For more + information, see `App Engine Standard request + routing `__ + and `App Engine Flex request + routing `__. + host (str): + Output only. The host that the task is sent to. + + The host is constructed from the domain name of the app + associated with the queue's project ID (for example + .appspot.com), and the + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. 
+ Tasks which were created using the App Engine SDK might have + a custom domain name. + + For more information, see `How Requests are + Routed `__. + """ + + service = proto.Field(proto.STRING, number=1) + + version = proto.Field(proto.STRING, number=2) + + instance = proto.Field(proto.STRING, number=3) + + host = proto.Field(proto.STRING, number=4) + + +class OAuthToken(proto.Message): + r"""Contains information needed for generating an `OAuth + token `__. + This type of authorization should generally only be used when + calling Google APIs hosted on \*.googleapis.com. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OAuth token. The service account + must be within the same project as the queue. The caller + must have iam.serviceAccounts.actAs permission for the + service account. + scope (str): + OAuth scope to be used for generating OAuth + access token. If not specified, + "https://www.googleapis.com/auth/cloud-platform" + will be used. + """ + + service_account_email = proto.Field(proto.STRING, number=1) + + scope = proto.Field(proto.STRING, number=2) + + +class OidcToken(proto.Message): + r"""Contains information needed for generating an `OpenID Connect + token `__. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the + token yourself. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OIDC token. The service account + must be within the same project as the queue. The caller + must have iam.serviceAccounts.actAs permission for the + service account. + audience (str): + Audience to be used when generating OIDC + token. If not specified, the URI specified in + target will be used. + """ + + service_account_email = proto.Field(proto.STRING, number=1) + + audience = proto.Field(proto.STRING, number=2) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/tasks_v2beta3/types/task.py b/google/cloud/tasks_v2beta3/types/task.py new file mode 100644 index 00000000..518cd8b4 --- /dev/null +++ b/google/cloud/tasks_v2beta3/types/task.py @@ -0,0 +1,229 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.tasks_v2beta3.types import target +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta3", manifest={"Task", "Attempt",}, +) + + +class Task(proto.Message): + r"""A unit of scheduled work. + + Attributes: + name (str): + Optionally caller-specified in + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + + The task name. 
+ + The task name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the task's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + - ``TASK_ID`` can contain only letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), or underscores (_). The maximum + length is 500 characters. + app_engine_http_request (~.target.AppEngineHttpRequest): + HTTP request that is sent to the App Engine app handler. + + An App Engine task is a task that has + [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] + set. + http_request (~.target.HttpRequest): + HTTP request that is sent to the task's target. + + An HTTP task is a task that has + [HttpRequest][google.cloud.tasks.v2beta3.HttpRequest] set. + schedule_time (~.timestamp.Timestamp): + The time when the task is scheduled to be attempted. + + For App Engine queues, this is when the task will be + attempted or retried. + + ``schedule_time`` will be truncated to the nearest + microsecond. + create_time (~.timestamp.Timestamp): + Output only. The time that the task was created. + + ``create_time`` will be truncated to the nearest second. + dispatch_deadline (~.duration.Duration): + The deadline for requests sent to the worker. If the worker + does not respond by this deadline then the request is + cancelled and the attempt is marked as a + ``DEADLINE_EXCEEDED`` failure. Cloud Tasks will retry the + task according to the + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + + Note that when the request is cancelled, Cloud Tasks will + stop listening for the response, but whether the worker + stops processing depends on the worker. For example, if the + worker is stuck, it may not react to cancelled requests. + + The default and maximum values depend on the type of + request: + + - For [HTTP tasks][google.cloud.tasks.v2beta3.HttpRequest], + the default is 10 minutes. The deadline must be in the + interval [15 seconds, 30 minutes]. + + - For [App Engine + tasks][google.cloud.tasks.v2beta3.AppEngineHttpRequest], + 0 indicates that the request has the default deadline. + The default deadline depends on the `scaling + type `__ + of the service: 10 minutes for standard apps with + automatic scaling, 24 hours for standard apps with manual + and basic scaling, and 60 minutes for flex apps. If the + request deadline is set, it must be in the interval [15 + seconds, 24 hours 15 seconds]. Regardless of the task's + ``dispatch_deadline``, the app handler will not run for + longer than than the service's timeout. We recommend + setting the ``dispatch_deadline`` to at most a few + seconds more than the app handler's timeout. For more + information see + `Timeouts `__. + + ``dispatch_deadline`` will be truncated to the nearest + millisecond. The deadline is an approximate deadline. + dispatch_count (int): + Output only. The number of attempts + dispatched. + This count includes attempts which have been + dispatched but haven't received a response. + response_count (int): + Output only. 
The number of attempts which + have received a response. + first_attempt (~.task.Attempt): + Output only. The status of the task's first attempt. + + Only + [dispatch_time][google.cloud.tasks.v2beta3.Attempt.dispatch_time] + will be set. The other + [Attempt][google.cloud.tasks.v2beta3.Attempt] information is + not retained by Cloud Tasks. + last_attempt (~.task.Attempt): + Output only. The status of the task's last + attempt. + view (~.task.Task.View): + Output only. The view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] has been returned. + """ + + class View(proto.Enum): + r"""The view specifies a subset of + [Task][google.cloud.tasks.v2beta3.Task] data. + + When a task is returned in a response, not all information is + retrieved by default because some data, such as payloads, might be + desirable to return only when needed because of its large size or + because of the sensitivity of data that it contains. + """ + VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + name = proto.Field(proto.STRING, number=1) + + app_engine_http_request = proto.Field( + proto.MESSAGE, + number=3, + oneof="payload_type", + message=target.AppEngineHttpRequest, + ) + + http_request = proto.Field( + proto.MESSAGE, number=11, oneof="payload_type", message=target.HttpRequest, + ) + + schedule_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + + dispatch_deadline = proto.Field( + proto.MESSAGE, number=12, message=duration.Duration, + ) + + dispatch_count = proto.Field(proto.INT32, number=6) + + response_count = proto.Field(proto.INT32, number=7) + + first_attempt = proto.Field(proto.MESSAGE, number=8, message="Attempt",) + + last_attempt = proto.Field(proto.MESSAGE, number=9, message="Attempt",) + + view = proto.Field(proto.ENUM, number=10, enum=View,) + + +class Attempt(proto.Message): + r"""The status of a task attempt. + + Attributes: + schedule_time (~.timestamp.Timestamp): + Output only. The time that this attempt was scheduled. + + ``schedule_time`` will be truncated to the nearest + microsecond. + dispatch_time (~.timestamp.Timestamp): + Output only. The time that this attempt was dispatched. + + ``dispatch_time`` will be truncated to the nearest + microsecond. + response_time (~.timestamp.Timestamp): + Output only. The time that this attempt response was + received. + + ``response_time`` will be truncated to the nearest + microsecond. + response_status (~.status.Status): + Output only. The response from the worker for this attempt. + + If ``response_time`` is unset, then the task has not been + attempted or is currently running and the + ``response_status`` field is meaningless. 
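A short sketch (placeholder task name) of reading these output-only attempt fields back with the FULL view, which requires the cloudtasks.tasks.fullView permission noted earlier:

from google.cloud import tasks_v2beta3

client = tasks_v2beta3.CloudTasksClient()
name = client.task_path("my-project", "us-central1", "my-queue", "my-task")

task = client.get_task(
    request={"name": name, "response_view": tasks_v2beta3.Task.View.FULL}
)

print("dispatched:", task.dispatch_count, "responded:", task.response_count)
if task.response_count:
    # response_status is only meaningful once a response has been received.
    print("last status code:", task.last_attempt.response_status.code)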
+ """ + + schedule_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + dispatch_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + response_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + + response_status = proto.Field(proto.MESSAGE, number=4, message=status.Status,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..4505b485 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/noxfile.py b/noxfile.py index b2bc0341..5f789fdd 100644 --- a/noxfile.py +++ b/noxfile.py @@ -27,8 +27,8 @@ BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -70,6 +70,8 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. + session.install("asyncmock", "pytest-asyncio") + session.install("mock", "pytest", "pytest-cov") session.install("-e", ".") @@ -139,7 +141,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=86") + session.run("coverage", "report", "--show-missing", "--fail-under=99") session.run("coverage", "erase") diff --git a/samples/snippets/create_http_task.py b/samples/snippets/create_http_task.py index 23896209..1b75e69c 100644 --- a/samples/snippets/create_http_task.py +++ b/samples/snippets/create_http_task.py @@ -17,13 +17,9 @@ import argparse -def create_http_task(project, - queue, - location, - url, - payload=None, - in_seconds=None, - task_name=None): +def create_http_task( + project, queue, location, url, payload=None, in_seconds=None, task_name=None +): # [START cloud_tasks_create_http_task] """Create a task for a given queue with an arbitrary payload.""" @@ -47,23 +43,23 @@ def create_http_task(project, # Construct the request body. task = { - 'http_request': { # Specify the type of request. - 'http_method': 'POST', - 'url': url # The full url path that the task will be sent to. - } + "http_request": { # Specify the type of request. + "http_method": tasks_v2.HttpMethod.POST, + "url": url, # The full url path that the task will be sent to. + } } if payload is not None: if isinstance(payload, dict): # Convert dict to JSON string payload = json.dumps(payload) # specify http content-type to application/json - task['http_request']['headers'] = {'Content-type': 'application/json'} + task["http_request"]["headers"] = {"Content-type": "application/json"} # The API expects a payload of type bytes. converted_payload = payload.encode() # Add the payload to the request. - task['http_request']['body'] = converted_payload + task["http_request"]["body"] = converted_payload if in_seconds is not None: # Convert "seconds from now" into an rfc3339 datetime string. @@ -74,65 +70,65 @@ def create_http_task(project, timestamp.FromDatetime(d) # Add the timestamp to the tasks. - task['schedule_time'] = timestamp + task["schedule_time"] = timestamp if task_name is not None: # Add the name to tasks. - task['name'] = task_name + task["name"] = task_name # Use the client to build and send the task. 
- response = client.create_task(parent, task) + response = client.create_task(request={"parent": parent, "task": task}) - print('Created task {}'.format(response.name)) + print("Created task {}".format(response.name)) # [END cloud_tasks_create_http_task] return response -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( description=create_http_task.__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) + formatter_class=argparse.RawDescriptionHelpFormatter, + ) parser.add_argument( - '--project', - help='Project of the queue to add the task to.', - required=True, + "--project", help="Project of the queue to add the task to.", required=True, ) parser.add_argument( - '--queue', - help='ID (short name) of the queue to add the task to.', + "--queue", + help="ID (short name) of the queue to add the task to.", required=True, ) parser.add_argument( - '--location', - help='Location of the queue to add the task to.', - required=True, + "--location", help="Location of the queue to add the task to.", required=True, ) parser.add_argument( - '--url', - help='The full url path that the request will be sent to.', + "--url", + help="The full url path that the request will be sent to.", required=True, ) parser.add_argument( - '--payload', - help='Optional payload to attach to the push queue.' + "--payload", help="Optional payload to attach to the push queue." ) parser.add_argument( - '--in_seconds', type=int, - help='The number of seconds from now to schedule task attempt.' + "--in_seconds", + type=int, + help="The number of seconds from now to schedule task attempt.", ) - parser.add_argument( - '--task_name', - help='Task name of the task to create' - ) + parser.add_argument("--task_name", help="Task name of the task to create") args = parser.parse_args() create_http_task( - args.project, args.queue, args.location, args.url, - args.payload, args.in_seconds, args.task_name) + args.project, + args.queue, + args.location, + args.url, + args.payload, + args.in_seconds, + args.task_name, + ) diff --git a/samples/snippets/create_http_task_test.py b/samples/snippets/create_http_task_test.py index b0fb3ed7..20cfced9 100644 --- a/samples/snippets/create_http_task_test.py +++ b/samples/snippets/create_http_task_test.py @@ -20,29 +20,29 @@ import create_http_task -TEST_PROJECT_ID = os.getenv('GOOGLE_CLOUD_PROJECT') -TEST_LOCATION = os.getenv('TEST_QUEUE_LOCATION', 'us-central1') -TEST_QUEUE_NAME = f'my-queue-{uuid.uuid4().hex}' +TEST_PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT") +TEST_LOCATION = os.getenv("TEST_QUEUE_LOCATION", "us-central1") +TEST_QUEUE_NAME = f"my-queue-{uuid.uuid4().hex}" @pytest.fixture() def test_queue(): client = tasks_v2.CloudTasksClient() - parent = client.location_path(TEST_PROJECT_ID, TEST_LOCATION) + parent = f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}" queue = { # The fully qualified path to the queue - 'name': client.queue_path( - TEST_PROJECT_ID, TEST_LOCATION, TEST_QUEUE_NAME), + "name": client.queue_path(TEST_PROJECT_ID, TEST_LOCATION, TEST_QUEUE_NAME), } - q = client.create_queue(parent, queue) + q = client.create_queue(request={"parent": parent, "queue": queue}) yield q - client.delete_queue(q.name) + client.delete_queue(request={"name": q.name}) def test_create_http_task(test_queue): - url = 'https://example.com/task_handler' + url = "https://example.com/task_handler" result = create_http_task.create_http_task( - TEST_PROJECT_ID, TEST_QUEUE_NAME, TEST_LOCATION, url) + TEST_PROJECT_ID, TEST_QUEUE_NAME, TEST_LOCATION, url 
+ ) assert TEST_QUEUE_NAME in result.name diff --git a/samples/snippets/create_http_task_with_token.py b/samples/snippets/create_http_task_with_token.py index 7320ede3..9c32c596 100644 --- a/samples/snippets/create_http_task_with_token.py +++ b/samples/snippets/create_http_task_with_token.py @@ -17,14 +17,16 @@ import datetime -def create_http_task(project, - queue, - location, - url, - service_account_email, - payload=None, - in_seconds=None, - task_name=None): +def create_http_task( + project, + queue, + location, + url, + service_account_email, + payload=None, + in_seconds=None, + task_name=None, +): # [START cloud_tasks_create_http_task_with_token] """Create a task for a given queue with an arbitrary payload.""" @@ -47,13 +49,11 @@ def create_http_task(project, # Construct the request body. task = { - 'http_request': { # Specify the type of request. - 'http_method': 'POST', - 'url': url, # The full url path that the task will be sent to. - 'oidc_token': { - 'service_account_email': service_account_email - } - } + "http_request": { # Specify the type of request. + "http_method": tasks_v2.HttpMethod.POST, + "url": url, # The full url path that the task will be sent to. + "oidc_token": {"service_account_email": service_account_email}, + } } if payload is not None: @@ -61,7 +61,7 @@ def create_http_task(project, converted_payload = payload.encode() # Add the payload to the request. - task['http_request']['body'] = converted_payload + task["http_request"]["body"] = converted_payload if in_seconds is not None: # Convert "seconds from now" into an rfc3339 datetime string. @@ -72,15 +72,17 @@ def create_http_task(project, timestamp.FromDatetime(d) # Add the timestamp to the tasks. - task['schedule_time'] = timestamp + task["schedule_time"] = timestamp if task_name is not None: # Add the name to tasks. - task['name'] = task_name + task["name"] = task_name # Use the client to build and send the task. 
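# ---------------------------------------------------------------------------
# Illustration (not part of this change): the dict-based task above works
# because proto-plus coerces dicts into messages; an equivalent construction
# with the typed classes exported by google.cloud.tasks_v2 would look roughly
# like this (the URL and service account email are placeholders):
from google.cloud import tasks_v2

typed_task = tasks_v2.Task(
    http_request=tasks_v2.HttpRequest(
        http_method=tasks_v2.HttpMethod.POST,
        url="https://example.com/task_handler",
        oidc_token=tasks_v2.OidcToken(
            service_account_email="SERVICE_ACCOUNT_EMAIL",
        ),
    ),
)
# ---------------------------------------------------------------------------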
- response = client.create_task(parent, task) + response = client.create_task(request={"parent": parent, "task": task}) - print('Created task {}'.format(response.name)) + print("Created task {}".format(response.name)) return response + + # [END cloud_tasks_create_http_task_with_token] diff --git a/samples/snippets/create_http_task_with_token_test.py b/samples/snippets/create_http_task_with_token_test.py index dd90d919..b93c3437 100644 --- a/samples/snippets/create_http_task_with_token_test.py +++ b/samples/snippets/create_http_task_with_token_test.py @@ -20,34 +20,32 @@ import create_http_task_with_token -TEST_PROJECT_ID = os.getenv('GOOGLE_CLOUD_PROJECT') -TEST_LOCATION = os.getenv('TEST_QUEUE_LOCATION', 'us-central1') -TEST_QUEUE_NAME = f'my-queue-{uuid.uuid4().hex}' +TEST_PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT") +TEST_LOCATION = os.getenv("TEST_QUEUE_LOCATION", "us-central1") +TEST_QUEUE_NAME = f"my-queue-{uuid.uuid4().hex}" TEST_SERVICE_ACCOUNT = ( - 'test-run-invoker@python-docs-samples-tests.iam.gserviceaccount.com') + "test-run-invoker@python-docs-samples-tests.iam.gserviceaccount.com" +) @pytest.fixture() def test_queue(): client = tasks_v2.CloudTasksClient() - parent = client.location_path(TEST_PROJECT_ID, TEST_LOCATION) + parent = f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}" queue = { # The fully qualified path to the queue - 'name': client.queue_path( - TEST_PROJECT_ID, TEST_LOCATION, TEST_QUEUE_NAME), + "name": client.queue_path(TEST_PROJECT_ID, TEST_LOCATION, TEST_QUEUE_NAME), } - q = client.create_queue(parent, queue) + q = client.create_queue(request={"parent": parent, "queue": queue}) yield q - client.delete_queue(q.name) + client.delete_queue(request={"name": q.name}) def test_create_http_task_with_token(test_queue): - url = 'https://example.com/task_handler' - result = create_http_task_with_token.create_http_task(TEST_PROJECT_ID, - TEST_QUEUE_NAME, - TEST_LOCATION, - url, - TEST_SERVICE_ACCOUNT) + url = "https://example.com/task_handler" + result = create_http_task_with_token.create_http_task( + TEST_PROJECT_ID, TEST_QUEUE_NAME, TEST_LOCATION, url, TEST_SERVICE_ACCOUNT + ) assert TEST_QUEUE_NAME in result.name diff --git a/samples/snippets/create_queue.py b/samples/snippets/create_queue.py index d8d4dfae..2192daff 100644 --- a/samples/snippets/create_queue.py +++ b/samples/snippets/create_queue.py @@ -22,14 +22,16 @@ def create_queue(project, queue_name, location): client = tasks_v2.CloudTasksClient() # Construct the fully qualified location path. - parent = client.location_path(project, location) + parent = f"projects/{project}/locations/{location}" # Construct the create queue request. - queue = {'name': client.queue_path(project, location, queue_name)} + queue = {"name": client.queue_path(project, location, queue_name)} # Use the client to create the queue. 
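# ---------------------------------------------------------------------------
# Illustration (not part of this change): `client.location_path(...)` is not
# generated on the new surface, which is why these samples build the parent
# with an f-string. Resource helpers for queues and tasks are still available,
# e.g. (placeholder values):
#
#   parent = f"projects/{project}/locations/{location}"
#   queue_name = client.queue_path(project, location, "my-queue")
#   task_name = client.task_path(project, location, "my-queue", "my-task")
# ---------------------------------------------------------------------------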
- response = client.create_queue(parent, queue) + response = client.create_queue(request={"parent": parent, "queue": queue}) - print('Created queue {}'.format(response.name)) + print("Created queue {}".format(response.name)) return response + + # [END cloud_tasks_create_queue] diff --git a/samples/snippets/create_queue_test.py b/samples/snippets/create_queue_test.py index 1f623a2a..64ee0597 100644 --- a/samples/snippets/create_queue_test.py +++ b/samples/snippets/create_queue_test.py @@ -20,9 +20,9 @@ import create_queue -TEST_PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT'] -TEST_LOCATION = os.getenv('TEST_QUEUE_LOCATION', 'us-central1') -TEST_QUEUE_NAME = f'my-queue-{uuid.uuid4().hex}' +TEST_PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +TEST_LOCATION = os.getenv("TEST_QUEUE_LOCATION", "us-central1") +TEST_QUEUE_NAME = f"my-queue-{uuid.uuid4().hex}" @pytest.fixture() @@ -32,9 +32,9 @@ def test_queue(): yield q - client.delete_queue(q.name) + client.delete_queue(request={"name": q.name}) def test_create_queue(capsys, test_queue): out, _ = capsys.readouterr() - assert 'Created queue' in out + assert "Created queue" in out diff --git a/samples/snippets/delete_queue.py b/samples/snippets/delete_queue.py index d6951424..0c76a962 100644 --- a/samples/snippets/delete_queue.py +++ b/samples/snippets/delete_queue.py @@ -25,6 +25,8 @@ def delete_queue(project, queue_name, location): queue = client.queue_path(project, location, queue_name) # Use the client to delete the queue. - client.delete_queue(queue) - print('Deleted queue') + client.delete_queue(request={"name": queue}) + print("Deleted queue") + + # [END cloud_tasks_delete_queue] diff --git a/samples/snippets/delete_queue_test.py b/samples/snippets/delete_queue_test.py index 4e1acf40..e814c910 100644 --- a/samples/snippets/delete_queue_test.py +++ b/samples/snippets/delete_queue_test.py @@ -22,35 +22,32 @@ import delete_queue -TEST_PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT'] -TEST_LOCATION = os.getenv('TEST_QUEUE_LOCATION', 'us-central1') -TEST_QUEUE_NAME = f'my-queue-{uuid.uuid4().hex}' +TEST_PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +TEST_LOCATION = os.getenv("TEST_QUEUE_LOCATION", "us-central1") +TEST_QUEUE_NAME = f"my-queue-{uuid.uuid4().hex}" @pytest.fixture() def test_queue(): client = tasks_v2.CloudTasksClient() - parent = client.location_path(TEST_PROJECT_ID, TEST_LOCATION) + parent = f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}" queue = { # The fully qualified path to the queue - 'name': client.queue_path( - TEST_PROJECT_ID, TEST_LOCATION, TEST_QUEUE_NAME), + "name": client.queue_path(TEST_PROJECT_ID, TEST_LOCATION, TEST_QUEUE_NAME), } - q = client.create_queue(parent, queue) + q = client.create_queue(request={"parent": parent, "queue": queue}) yield q try: # Attempt to delete the queue in case the sample failed. - client.delete_queue(q.name) + client.delete_queue(request={"name": q.name}) except exceptions.NotFound: # The queue was already successfully deleted. 
- print('Queue already deleted successfully') + print("Queue already deleted successfully") def test_delete_queue(capsys, test_queue): - delete_queue.delete_queue( - TEST_PROJECT_ID, TEST_QUEUE_NAME, TEST_LOCATION - ) + delete_queue.delete_queue(TEST_PROJECT_ID, TEST_QUEUE_NAME, TEST_LOCATION) out, _ = capsys.readouterr() - assert 'Deleted queue' in out + assert "Deleted queue" in out diff --git a/samples/snippets/list_queues.py b/samples/snippets/list_queues.py index fa48907f..17b7886c 100644 --- a/samples/snippets/list_queues.py +++ b/samples/snippets/list_queues.py @@ -22,15 +22,19 @@ def list_queues(project, location): client = tasks_v2.CloudTasksClient() # Construct the fully qualified location path. - parent = client.location_path(project, location) + parent = f"projects/{project}/locations/{location}" # Use the client to obtain the queues. - response = client.list_queues(parent) + response = client.list_queues(request={"parent": parent}) # Print the results. + num_results = 0 for queue in response: + num_results = num_results + 1 print(queue.name) - if response.num_results == 0: - print('No queues found!') + if num_results == 0: + print("No queues found!") + + # [END cloud_tasks_list_queues] diff --git a/samples/snippets/list_queues_test.py b/samples/snippets/list_queues_test.py index df11d0bd..2f1d07d9 100644 --- a/samples/snippets/list_queues_test.py +++ b/samples/snippets/list_queues_test.py @@ -20,36 +20,35 @@ import list_queues -TEST_PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT'] -TEST_LOCATION = os.getenv('TEST_QUEUE_LOCATION', 'us-central1') -TEST_QUEUE_NAME = f'my-queue-{uuid.uuid4().hex}' +TEST_PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +TEST_LOCATION = os.getenv("TEST_QUEUE_LOCATION", "us-central1") +TEST_QUEUE_NAME = f"my-queue-{uuid.uuid4().hex}" @pytest.fixture() def test_queue(): client = tasks_v2.CloudTasksClient() - parent = client.location_path(TEST_PROJECT_ID, TEST_LOCATION) + parent = f"projects/{TEST_PROJECT_ID}/locations/{TEST_LOCATION}" queue = { # The fully qualified path to the queue - 'name': client.queue_path( - TEST_PROJECT_ID, TEST_LOCATION, TEST_QUEUE_NAME), + "name": client.queue_path(TEST_PROJECT_ID, TEST_LOCATION, TEST_QUEUE_NAME), } - q = client.create_queue(parent, queue) + q = client.create_queue(request={"parent": parent, "queue": queue}) yield q - client.delete_queue(q.name) + client.delete_queue(request={"name": q.name}) def test_list_queues_not_present(capsys): list_queues.list_queues(TEST_PROJECT_ID, TEST_LOCATION) out, _ = capsys.readouterr() - assert(TEST_QUEUE_NAME not in out) + assert TEST_QUEUE_NAME not in out def test_list_queues_present(capsys, test_queue): list_queues.list_queues(TEST_PROJECT_ID, TEST_LOCATION) out, _ = capsys.readouterr() - assert(TEST_QUEUE_NAME in out) + assert TEST_QUEUE_NAME in out diff --git a/scripts/fixup_tasks_v2_keywords.py b/scripts/fixup_tasks_v2_keywords.py new file mode 100644 index 00000000..30d65ed0 --- /dev/null +++ b/scripts/fixup_tasks_v2_keywords.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class tasksCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_queue': ('parent', 'queue', ), + 'create_task': ('parent', 'task', 'response_view', ), + 'delete_queue': ('name', ), + 'delete_task': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_queue': ('name', ), + 'get_task': ('name', 'response_view', ), + 'list_queues': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_tasks': ('parent', 'response_view', 'page_size', 'page_token', ), + 'pause_queue': ('name', ), + 'purge_queue': ('name', ), + 'resume_queue': ('name', ), + 'run_task': ('name', 'response_view', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_queue': ('queue', 'update_mask', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=tasksCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
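# ---------------------------------------------------------------------------
# Illustration (not part of this script): the transformer above rewrites
# flattened calls into the request-dict form while keeping the control
# parameters as keywords. Given user code such as (placeholder variables):
#
#       client.create_task(parent, task, retry=my_retry)
#
# the script writes out:
#
#       client.create_task(request={'parent': parent, 'task': task}, retry=my_retry)
#
# Calls that already pass `request=` are left untouched by leave_Call.
# ---------------------------------------------------------------------------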
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the tasks client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/scripts/fixup_tasks_v2beta2_keywords.py b/scripts/fixup_tasks_v2beta2_keywords.py new file mode 100644 index 00000000..3d480836 --- /dev/null +++ b/scripts/fixup_tasks_v2beta2_keywords.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
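# ---------------------------------------------------------------------------
# Illustration (not part of this script): with the argparse options defined
# above, a fix-up script is run against an existing code tree roughly like
# this (directory names are placeholders; the output directory must already
# exist and be empty):
#
#   python scripts/fixup_tasks_v2_keywords.py \
#       --input-directory my_project/ \
#       --output-directory my_project_fixed/
# ---------------------------------------------------------------------------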
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class tasksCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'acknowledge_task': ('name', 'schedule_time', ), + 'cancel_lease': ('name', 'schedule_time', 'response_view', ), + 'create_queue': ('parent', 'queue', ), + 'create_task': ('parent', 'task', 'response_view', ), + 'delete_queue': ('name', ), + 'delete_task': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_queue': ('name', ), + 'get_task': ('name', 'response_view', ), + 'lease_tasks': ('parent', 'lease_duration', 'max_tasks', 'response_view', 'filter', ), + 'list_queues': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_tasks': ('parent', 'response_view', 'page_size', 'page_token', ), + 'pause_queue': ('name', ), + 'purge_queue': ('name', ), + 'renew_lease': ('name', 'schedule_time', 'lease_duration', 'response_view', ), + 'resume_queue': ('name', ), + 'run_task': ('name', 'response_view', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_queue': ('queue', 'update_mask', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=tasksCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. 
+ + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the tasks client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/scripts/fixup_tasks_v2beta3_keywords.py b/scripts/fixup_tasks_v2beta3_keywords.py new file mode 100644 index 00000000..30d65ed0 --- /dev/null +++ b/scripts/fixup_tasks_v2beta3_keywords.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class tasksCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_queue': ('parent', 'queue', ), + 'create_task': ('parent', 'task', 'response_view', ), + 'delete_queue': ('name', ), + 'delete_task': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_queue': ('name', ), + 'get_task': ('name', 'response_view', ), + 'list_queues': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_tasks': ('parent', 'response_view', 'page_size', 'page_token', ), + 'pause_queue': ('name', ), + 'purge_queue': ('name', ), + 'resume_queue': ('name', ), + 'run_task': ('name', 'response_view', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_queue': ('queue', 'update_mask', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=tasksCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the tasks client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/setup.py b/setup.py index e88249a5..a4b3aaba 100644 --- a/setup.py +++ b/setup.py @@ -24,9 +24,10 @@ version = "1.5.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", - 'enum34; python_version < "3.4"', + "proto-plus >= 0.4.0", + "libcst >= 0.2.5", ] package_root = os.path.abspath(os.path.dirname(__file__)) @@ -36,7 +37,9 @@ readme = readme_file.read() packages = [ - package for package in setuptools.find_packages() if package.startswith("google") + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") ] namespaces = ["google"] @@ -57,12 +60,10 @@ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -70,7 +71,12 @@ packages=packages, namespace_packages=namespaces, install_requires=dependencies, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", + python_requires=">=3.6", + scripts=[ + "scripts/fixup_tasks_v2_keywords.py", + "scripts/fixup_tasks_v2beta2_keywords.py", + "scripts/fixup_tasks_v2beta3_keywords.py", + ], include_package_data=True, zip_safe=False, ) diff --git a/synth.metadata 
b/synth.metadata index 85423ab6..08fb9823 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,21 +4,21 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-tasks.git", - "sha": "bb3fdb26ff34eaff9e9aa869687de5a7ce5a43a2" + "sha": "1a5a65fd55ba23421b4e7fbce0e74a46697ba092" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "80f46100c047bc47efe0025ee537dc8ee413ad04" + "sha": "d91dd8aac77f7a9c5506c238038a26fa4f9e361e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "80f46100c047bc47efe0025ee537dc8ee413ad04" + "sha": "d91dd8aac77f7a9c5506c238038a26fa4f9e361e" } } ], diff --git a/synth.py b/synth.py index f2a7d325..4d558c30 100644 --- a/synth.py +++ b/synth.py @@ -38,86 +38,19 @@ s.copy(library, excludes=excludes) - s.replace( - f"google/cloud/tasks_{version}/gapic/cloud_tasks_client.py", - "(Google IAM .*?_) ", - "\g<1>_ ", - ) - - # Issues with Anonymous ('__') links. Change to named. - s.replace(f"google/cloud/tasks_{version}/proto/*.py", ">`__", ">`_") - -# Wrapped link fails due to space in link (v2beta2) -s.replace( - "google/cloud/tasks_v2beta2/proto/queue_pb2.py", - "(in queue.yaml/xml) <\n\s+", - "\g<1>\n <", -) - -# Wrapped link fails due to newline (v2) -s.replace( - "google/cloud/tasks_v2/proto/queue_pb2.py", - """#retry_parameters> - `__\.""", - "#retry_parameters>`__.", -) - -# Restore updated example from PR #7025. -s.replace( - "google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py", - ">>> # TODO: Initialize `queue`:", - ">>> # Initialize `queue`:", -) -s.replace( - "google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py", - "^(\s+)>>> queue = {}\n", - "\g<1>>>> queue = {\n" - "\g<1>... # The fully qualified path to the queue\n" - "\g<1>... 'name': client.queue_path('[PROJECT]', '[LOCATION]', '[NAME]'),\n" - "\g<1>... 'app_engine_http_queue': {\n" - "\g<1>... 'app_engine_routing_override': {\n" - "\g<1>... # The App Engine service that will receive the tasks.\n" - "\g<1>... 'service': 'default',\n" - "\g<1>... },\n" - "\g<1>... },\n" - "\g<1>... }\n", -) - -# Fix enum docstring references -s.replace( - "google/cloud/**/cloud_tasks_client.py", - "types\.View", - "enums.Task.View") - -# Change wording of optional params to disambiguate -# client library request methods from Cloud Task requests -s.replace("google/cloud/**/*.py", -""" retry \(Optional\[google\.api_core\.retry\.Retry\]\): A retry object used - to retry requests\. If ``None`` is specified, requests will - be retried using a default configuration\. - timeout \(Optional\[float\]\): The amount of time, in seconds, to wait - for the request to complete\. Note that if ``retry`` is - specified, the timeout applies to each individual attempt\. - metadata \(Optional\[Sequence\[Tuple\[str, str\]\]\]\): Additional metadata - that is provided to the method\. - -""", -""" retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry client library requests. If ``None`` is specified, - requests will be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the client library request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the client library method. - -""") +# Fix docstring. 
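# ---------------------------------------------------------------------------
# Illustration (not part of this change): synthtool's s.replace(paths, before,
# after) applies a regex substitution to the generated files matching `paths`.
# The two calls that follow escape the trailing "*" in "X-Google-*" and
# "X-AppEngine-*", presumably so it is not read as RST markup. A rough
# hand-written equivalent, for orientation only:
#
#   import pathlib, re
#   for p in pathlib.Path("google/cloud").glob("*/types/target.py"):
#       p.write_text(re.sub(r"X-Google-\*", r"X-Google-\\*", p.read_text()))
# ---------------------------------------------------------------------------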
+s.replace("google/cloud/*/types/target.py", "X-Google-\*", "X-Google-\\*") +s.replace("google/cloud/*/types/target.py", "X-AppEngine-\*", "X-AppEngine-\\*") # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=86, samples=True) -s.move(templated_files) +templated_files = common.py_library( + samples=False, # set to True only if there are samples + microgenerator=True, + cov_level=99, +) +s.move(templated_files, excludes=[".coveragerc"]) # microgenerator has a good .coveragerc file # ---------------------------------------------------------------------------- # Samples templates @@ -127,11 +60,4 @@ # TODO(busunkim): Use latest sphinx after microgenerator transition s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"') -# Escape '_' in docstrings -s.replace( - "google/cloud/**/*_pb2.py", - """\_$""", - """\_""", -) - s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/tests/system/gapic/v2/test_system_tasks_v2.py b/tests/system/gapic/v2/test_system_tasks_v2.py index ff6c8243..def987f0 100644 --- a/tests/system/gapic/v2/test_system_tasks_v2.py +++ b/tests/system/gapic/v2/test_system_tasks_v2.py @@ -22,5 +22,6 @@ def test_list_queues(self): client = tasks_v2.CloudTasksClient() # Setup Request - parent = client.location_path(os.environ["PROJECT_ID"], "us-central1") - client.list_queues(parent) + project_id = os.environ["PROJECT_ID"] + parent = f"projects/{project_id}/locations/us-central1" + client.list_queues(request={"parent": parent}) diff --git a/tests/unit/gapic/tasks_v2/__init__.py b/tests/unit/gapic/tasks_v2/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/tests/unit/gapic/tasks_v2/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/tasks_v2/test_cloud_tasks.py b/tests/unit/gapic/tasks_v2/test_cloud_tasks.py new file mode 100644 index 00000000..f4e2e44b --- /dev/null +++ b/tests/unit/gapic/tasks_v2/test_cloud_tasks.py @@ -0,0 +1,4193 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksAsyncClient +from google.cloud.tasks_v2.services.cloud_tasks import CloudTasksClient +from google.cloud.tasks_v2.services.cloud_tasks import pagers +from google.cloud.tasks_v2.services.cloud_tasks import transports +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import target +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import options_pb2 as options # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 as any # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore +from google.type import expr_pb2 as expr # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudTasksClient._get_default_mtls_endpoint(None) is None + assert ( + CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient]) +def test_cloud_tasks_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "cloudtasks.googleapis.com:443" + + +def test_cloud_tasks_client_get_transport_class(): + transport = CloudTasksClient.get_transport_class() + assert transport == transports.CloudTasksGrpcTransport + + transport = CloudTasksClient.get_transport_class("grpc") + assert transport == transports.CloudTasksGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient) +) +@mock.patch.object( + CloudTasksAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudTasksAsyncClient), +) +def test_cloud_tasks_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
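# ---------------------------------------------------------------------------
# Illustration (not part of these tests): the same knobs exercised below are
# what end users pass to the new client; a minimal sketch (the endpoint value
# is a placeholder):
#
#   from google.api_core.client_options import ClientOptions
#   from google.cloud import tasks_v2
#
#   client = tasks_v2.CloudTasksClient(
#       client_options=ClientOptions(api_endpoint="cloudtasks.googleapis.com"),
#   )
#   # or select the transport explicitly:
#   client = tasks_v2.CloudTasksClient(transport="grpc")
# ---------------------------------------------------------------------------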
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_tasks_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_tasks_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
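# ---------------------------------------------------------------------------
# Illustration (not part of these tests): the scopes, quota_project_id and
# credentials_file options checked here map onto user-facing configuration,
# roughly (placeholder values):
#
#   from google.api_core.client_options import ClientOptions
#   from google.cloud import tasks_v2
#
#   client = tasks_v2.CloudTasksClient(
#       client_options=ClientOptions(
#           quota_project_id="my-billing-project",
#           credentials_file="service-account.json",
#       ),
#   )
# ---------------------------------------------------------------------------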
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_cloud_tasks_client_client_options_from_dict(): + with mock.patch( + "google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudTasksClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_queues( + transport: str = "grpc", request_type=cloudtasks.ListQueuesRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_queues_from_dict(): + test_list_queues(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_queues_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.ListQueuesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
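# ---------------------------------------------------------------------------
# Illustration (not part of these tests): the same pattern can be used to unit
# test downstream code against the microgen client: patch the transport method
# and return a proto-plus response. A sketch, assuming the imports at the top
# of this module:
#
#   client = CloudTasksClient(credentials=credentials.AnonymousCredentials())
#   with mock.patch.object(type(client._transport.list_queues), "__call__") as call:
#       call.return_value = cloudtasks.ListQueuesResponse(next_page_token="abc")
#       pager = client.list_queues(request={"parent": "projects/p/locations/l"})
# ---------------------------------------------------------------------------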
+ assert isinstance(response, pagers.ListQueuesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_queues_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + call.return_value = cloudtasks.ListQueuesResponse() + + client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_queues_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse() + ) + + await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_queues_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_queues(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_queues_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_queues( + cloudtasks.ListQueuesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_queues_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloudtasks.ListQueuesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_queues(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_queues_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_queues( + cloudtasks.ListQueuesRequest(), parent="parent_value", + ) + + +def test_list_queues_pager(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(), queue.Queue(), queue.Queue(),], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",), + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(),], next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_queues(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, queue.Queue) for i in results) + + +def test_list_queues_pages(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(), queue.Queue(), queue.Queue(),], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",), + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(),], next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],), + RuntimeError, + ) + pages = list(client.list_queues(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_queues_async_pager(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
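+        # (Descriptive note, inferred from the test itself: each entry in side_effect below is
+        # returned by one successive stub call; the trailing RuntimeError acts as a guard, since
+        # the pager should stop at the empty page token and never reach it.)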
+ call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(), queue.Queue(), queue.Queue(),], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",), + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(),], next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],), + RuntimeError, + ) + async_pager = await client.list_queues(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, queue.Queue) for i in responses) + + +@pytest.mark.asyncio +async def test_list_queues_async_pages(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(), queue.Queue(), queue.Queue(),], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",), + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(),], next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_queues(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_queue(transport: str = "grpc", request_type=cloudtasks.GetQueueRequest): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", state=queue.Queue.State.RUNNING, + ) + + response = client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_get_queue_from_dict(): + test_get_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.GetQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
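+        # (Descriptive note: FakeUnaryUnaryCall wraps the response in an awaitable fake call,
+        # so the mocked async stub can be awaited like a real gRPC call and resolves to the
+        # wrapped Queue message.)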
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue(name="name_value", state=queue.Queue.State.RUNNING,) + ) + + response = await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_get_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_queue), "__call__") as call: + call.return_value = queue.Queue() + + client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + + await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_queue( + cloudtasks.GetQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_queue( + cloudtasks.GetQueueRequest(), name="name_value", + ) + + +def test_create_queue( + transport: str = "grpc", request_type=cloudtasks.CreateQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name="name_value", state=gct_queue.Queue.State.RUNNING, + ) + + response = client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + + assert response.name == "name_value" + + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_create_queue_from_dict(): + test_create_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.CreateQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_queue.Queue(name="name_value", state=gct_queue.Queue.State.RUNNING,) + ) + + response = await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + + assert response.name == "name_value" + + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_create_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_queue), "__call__") as call: + call.return_value = gct_queue.Queue() + + client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + + await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_queue( + parent="parent_value", queue=gct_queue.Queue(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].queue == gct_queue.Queue(name="name_value") + + +def test_create_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_queue( + parent="parent_value", queue=gct_queue.Queue(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].queue == gct_queue.Queue(name="name_value") + + +@pytest.mark.asyncio +async def test_create_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +def test_update_queue( + transport: str = "grpc", request_type=cloudtasks.UpdateQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name="name_value", state=gct_queue.Queue.State.RUNNING, + ) + + response = client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + + assert response.name == "name_value" + + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_update_queue_from_dict(): + test_update_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.UpdateQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_queue.Queue(name="name_value", state=gct_queue.Queue.State.RUNNING,) + ) + + response = await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + + assert response.name == "name_value" + + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_update_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + request.queue.name = "queue.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_queue), "__call__") as call: + call.return_value = gct_queue.Queue() + + client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "queue.name=queue.name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + request.queue.name = "queue.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + + await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "queue.name=queue.name/value",) in kw["metadata"] + + +def test_update_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_queue( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].queue == gct_queue.Queue(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_queue( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].queue == gct_queue.Queue(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_queue( + transport: str = "grpc", request_type=cloudtasks.DeleteQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_from_dict(): + test_delete_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.DeleteQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_queue), "__call__") as call: + call.return_value = None + + client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_queue( + cloudtasks.DeleteQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
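+        # (Descriptive note: of the two assignments below, the second, awaitable fake is the
+        # value the async stub actually returns; the first is overwritten.)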
+ call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_queue( + cloudtasks.DeleteQueueRequest(), name="name_value", + ) + + +def test_purge_queue( + transport: str = "grpc", request_type=cloudtasks.PurgeQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", state=queue.Queue.State.RUNNING, + ) + + response = client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_purge_queue_from_dict(): + test_purge_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_purge_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.PurgeQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.purge_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue(name="name_value", state=queue.Queue.State.RUNNING,) + ) + + response = await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_purge_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
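+    # (Descriptive note: the values set here feed the x-goog-request-params routing header
+    # that the assertions at the end of this test check for.)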
+ request = cloudtasks.PurgeQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.purge_queue), "__call__") as call: + call.return_value = queue.Queue() + + client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.purge_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + + await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_purge_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.purge_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_purge_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.purge_queue( + cloudtasks.PurgeQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.purge_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.purge_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.purge_queue( + cloudtasks.PurgeQueueRequest(), name="name_value", + ) + + +def test_pause_queue( + transport: str = "grpc", request_type=cloudtasks.PauseQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", state=queue.Queue.State.RUNNING, + ) + + response = client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_pause_queue_from_dict(): + test_pause_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_pause_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.PauseQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.pause_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue(name="name_value", state=queue.Queue.State.RUNNING,) + ) + + response = await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_pause_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pause_queue), "__call__") as call: + call.return_value = queue.Queue() + + client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_pause_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.pause_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + + await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_pause_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.pause_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_pause_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_queue( + cloudtasks.PauseQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.pause_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.pause_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.pause_queue( + cloudtasks.PauseQueueRequest(), name="name_value", + ) + + +def test_resume_queue( + transport: str = "grpc", request_type=cloudtasks.ResumeQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", state=queue.Queue.State.RUNNING, + ) + + response = client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_resume_queue_from_dict(): + test_resume_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_resume_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.ResumeQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.resume_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue(name="name_value", state=queue.Queue.State.RUNNING,) + ) + + response = await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_resume_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.resume_queue), "__call__") as call: + call.return_value = queue.Queue() + + client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_resume_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.resume_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + + await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_resume_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.resume_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_resume_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_queue( + cloudtasks.ResumeQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.resume_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.resume_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.resume_queue( + cloudtasks.ResumeQueueRequest(), name="name_value", + ) + + +def test_get_iam_policy( + transport: str = "grpc", request_type=iam_policy.GetIamPolicyRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_from_dict(): + test_get_iam_policy(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_get_iam_policy_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy(resource="resource_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + +def test_get_iam_policy_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy.GetIamPolicyRequest(), resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_iam_policy(resource="resource_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy.GetIamPolicyRequest(), resource="resource_value", + ) + + +def test_set_iam_policy( + transport: str = "grpc", request_type=iam_policy.SetIamPolicyRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_from_dict(): + test_set_iam_policy(request_type=dict) + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_set_iam_policy_from_dict_foreign(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy.Policy(version=774), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_iam_policy(resource="resource_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + +def test_set_iam_policy_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy.SetIamPolicyRequest(), resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy(resource="resource_value",) + + # Establish that the underlying call was made with the expected + # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy.SetIamPolicyRequest(), resource="resource_value", + ) + + +def test_test_iam_permissions( + transport: str = "grpc", request_type=iam_policy.TestIamPermissionsRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_from_dict(): + test_test_iam_permissions(request_type=dict) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict_foreign(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_test_iam_permissions_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.test_iam_permissions( + resource="resource_value", permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + assert args[0].permissions == ["permissions_value"] + + +def test_test_iam_permissions_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error.
+ with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.test_iam_permissions( + resource="resource_value", permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + assert args[0].permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.test_iam_permissions( + iam_policy.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +def test_list_tasks(transport: str = "grpc", request_type=cloudtasks.ListTasksRequest): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.ListTasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_tasks_from_dict(): + test_list_tasks(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_tasks_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.ListTasksRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_tasks_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + call.return_value = cloudtasks.ListTasksResponse() + + client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_tasks_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse() + ) + + await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_tasks_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tasks(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_tasks_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_tasks( + cloudtasks.ListTasksRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_tasks(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_tasks( + cloudtasks.ListTasksRequest(), parent="parent_value", + ) + + +def test_list_tasks_pager(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc", + ), + cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",), + cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",), + cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_tasks(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, task.Task) for i in results) + + +def test_list_tasks_pages(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc", + ), + cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",), + cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",), + cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],), + RuntimeError, + ) + pages = list(client.list_tasks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tasks_async_pager(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_tasks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc", + ), + cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",), + cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",), + cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],), + RuntimeError, + ) + async_pager = await client.list_tasks(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, task.Task) for i in responses) + + +@pytest.mark.asyncio +async def test_list_tasks_async_pages(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc", + ), + cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",), + cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",), + cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_tasks(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_task(transport: str = "grpc", request_type=cloudtasks.GetTaskRequest): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest( + http_method=target.HttpMethod.POST + ), + ) + + response = client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.dispatch_count == 1496 + + assert response.response_count == 1527 + + assert response.view == task.Task.View.BASIC + + +def test_get_task_from_dict(): + test_get_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = cloudtasks.GetTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + ) + + response = await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.dispatch_count == 1496 + + assert response.response_count == 1527 + + assert response.view == task.Task.View.BASIC + + +def test_get_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_task), "__call__") as call: + call.return_value = task.Task() + + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + + await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_task( + cloudtasks.GetTaskRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_task( + cloudtasks.GetTaskRequest(), name="name_value", + ) + + +def test_create_task( + transport: str = "grpc", request_type=cloudtasks.CreateTaskRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest( + http_method=target.HttpMethod.POST + ), + ) + + response = client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_task.Task) + + assert response.name == "name_value" + + assert response.dispatch_count == 1496 + + assert response.response_count == 1527 + + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_from_dict(): + test_create_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.CreateTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.create_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + ) + ) + + response = await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_task.Task) + + assert response.name == "name_value" + + assert response.dispatch_count == 1496 + + assert response.response_count == 1527 + + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_task), "__call__") as call: + call.return_value = gct_task.Task() + + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_task( + parent="parent_value", task=gct_task.Task(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].task == gct_task.Task(name="name_value") + + +def test_create_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_task( + parent="parent_value", task=gct_task.Task(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].task == gct_task.Task(name="name_value") + + +@pytest.mark.asyncio +async def test_create_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +def test_delete_task( + transport: str = "grpc", request_type=cloudtasks.DeleteTaskRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_task_from_dict(): + test_delete_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.DeleteTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.delete_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_task), "__call__") as call: + call.return_value = None + + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_task( + cloudtasks.DeleteTaskRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_task( + cloudtasks.DeleteTaskRequest(), name="name_value", + ) + + +def test_run_task(transport: str = "grpc", request_type=cloudtasks.RunTaskRequest): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest( + http_method=target.HttpMethod.POST + ), + ) + + response = client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.dispatch_count == 1496 + + assert response.response_count == 1527 + + assert response.view == task.Task.View.BASIC + + +def test_run_task_from_dict(): + test_run_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_run_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.RunTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.run_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + ) + + response = await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.dispatch_count == 1496 + + assert response.response_count == 1527 + + assert response.view == task.Task.View.BASIC + + +def test_run_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.run_task), "__call__") as call: + call.return_value = task.Task() + + client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_run_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.run_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + + await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_run_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_run_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.run_task( + cloudtasks.RunTaskRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_run_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.run_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.run_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_run_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.run_task( + cloudtasks.RunTaskRequest(), name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = CloudTasksClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudTasksGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.CloudTasksGrpcTransport,) + + +def test_cloud_tasks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.CloudTasksTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_tasks_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudTasksTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_queues", + "get_queue", + "create_queue", + "update_queue", + "delete_queue", + "purge_queue", + "pause_queue", + "resume_queue", + "get_iam_policy", + "set_iam_policy", + "test_iam_permissions", + "list_tasks", + "get_task", + "create_task", + "delete_task", + "run_task", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_cloud_tasks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_tasks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + CloudTasksClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_cloud_tasks_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_tasks_host_no_port(): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudtasks.googleapis.com" + ), + ) + assert client._transport._host == "cloudtasks.googleapis.com:443" + + +def test_cloud_tasks_host_with_port(): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudtasks.googleapis.com:8000" + ), + ) + assert client._transport._host == "cloudtasks.googleapis.com:8000" + + +def test_cloud_tasks_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +def test_cloud_tasks_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.CloudTasksGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_cloud_tasks_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_cloud_tasks_grpc_asyncio_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. + mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.CloudTasksGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_cloud_tasks_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_cloud_tasks_grpc_asyncio_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.CloudTasksGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_queue_path(): + project = "squid" + location = "clam" + queue = "whelk" + + expected = "projects/{project}/locations/{location}/queues/{queue}".format( + project=project, location=location, queue=queue, + ) + actual = CloudTasksClient.queue_path(project, location, queue) + assert expected == actual + + +def test_parse_queue_path(): + expected = { + "project": "octopus", + "location": "oyster", + "queue": "nudibranch", + } + path = CloudTasksClient.queue_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_queue_path(path) + assert expected == actual + + +def test_task_path(): + project = "squid" + location = "clam" + queue = "whelk" + task = "octopus" + + expected = "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format( + project=project, location=location, queue=queue, task=task, + ) + actual = CloudTasksClient.task_path(project, location, queue, task) + assert expected == actual + + +def test_parse_task_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "queue": "cuttlefish", + "task": "mussel", + } + path = CloudTasksClient.task_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudTasksClient.parse_task_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudTasksTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudTasksTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudTasksClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/tasks_v2beta2/__init__.py b/tests/unit/gapic/tasks_v2beta2/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/tests/unit/gapic/tasks_v2beta2/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py b/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py new file mode 100644 index 00000000..ec78a0d1 --- /dev/null +++ b/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py @@ -0,0 +1,5026 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient +from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksClient +from google.cloud.tasks_v2beta2.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta2.services.cloud_tasks import transports +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import target +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import options_pb2 as options # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 as any # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore +from google.type import expr_pb2 as expr # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudTasksClient._get_default_mtls_endpoint(None) is None + assert ( + CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient]) +def test_cloud_tasks_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "cloudtasks.googleapis.com:443" + + +def test_cloud_tasks_client_get_transport_class(): + transport = CloudTasksClient.get_transport_class() + assert transport == transports.CloudTasksGrpcTransport + + transport = CloudTasksClient.get_transport_class("grpc") + assert transport == transports.CloudTasksGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient) +) +@mock.patch.object( + CloudTasksAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudTasksAsyncClient), +) +def test_cloud_tasks_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_tasks_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_tasks_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_cloud_tasks_client_client_options_from_dict(): + with mock.patch( + "google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudTasksClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_queues( + transport: str = "grpc", request_type=cloudtasks.ListQueuesRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_queues_from_dict(): + test_list_queues(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_queues_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.ListQueuesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListQueuesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_queues_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + call.return_value = cloudtasks.ListQueuesResponse() + + client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_queues_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse() + ) + + await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_queues_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_queues(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_queues_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_queues( + cloudtasks.ListQueuesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_queues_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloudtasks.ListQueuesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_queues(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_queues_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_queues( + cloudtasks.ListQueuesRequest(), parent="parent_value", + ) + + +def test_list_queues_pager(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(), queue.Queue(), queue.Queue(),], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",), + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(),], next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_queues(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, queue.Queue) for i in results) + + +def test_list_queues_pages(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(), queue.Queue(), queue.Queue(),], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",), + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(),], next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],), + RuntimeError, + ) + pages = list(client.list_queues(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_queues_async_pager(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(), queue.Queue(), queue.Queue(),], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",), + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(),], next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],), + RuntimeError, + ) + async_pager = await client.list_queues(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, queue.Queue) for i in responses) + + +@pytest.mark.asyncio +async def test_list_queues_async_pages(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(), queue.Queue(), queue.Queue(),], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",), + cloudtasks.ListQueuesResponse( + queues=[queue.Queue(),], next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_queues(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_queue(transport: str = "grpc", request_type=cloudtasks.GetQueueRequest): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + app_engine_http_target=target.AppEngineHttpTarget( + app_engine_routing_override=target.AppEngineRouting( + service="service_value" + ) + ), + ) + + response = client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_get_queue_from_dict(): + test_get_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.GetQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue(name="name_value", state=queue.Queue.State.RUNNING,) + ) + + response = await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_get_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_queue), "__call__") as call: + call.return_value = queue.Queue() + + client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + + await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_queue( + cloudtasks.GetQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_queue( + cloudtasks.GetQueueRequest(), name="name_value", + ) + + +def test_create_queue( + transport: str = "grpc", request_type=cloudtasks.CreateQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + app_engine_http_target=target.AppEngineHttpTarget( + app_engine_routing_override=target.AppEngineRouting( + service="service_value" + ) + ), + ) + + response = client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + + assert response.name == "name_value" + + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_create_queue_from_dict(): + test_create_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.CreateQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_queue.Queue(name="name_value", state=gct_queue.Queue.State.RUNNING,) + ) + + response = await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + + assert response.name == "name_value" + + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_create_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_queue), "__call__") as call: + call.return_value = gct_queue.Queue() + + client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + + await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_queue( + parent="parent_value", queue=gct_queue.Queue(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].queue == gct_queue.Queue(name="name_value") + + +def test_create_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_queue( + parent="parent_value", queue=gct_queue.Queue(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].queue == gct_queue.Queue(name="name_value") + + +@pytest.mark.asyncio +async def test_create_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +def test_update_queue( + transport: str = "grpc", request_type=cloudtasks.UpdateQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + app_engine_http_target=target.AppEngineHttpTarget( + app_engine_routing_override=target.AppEngineRouting( + service="service_value" + ) + ), + ) + + response = client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + + assert response.name == "name_value" + + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_update_queue_from_dict(): + test_update_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.UpdateQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.update_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_queue.Queue(name="name_value", state=gct_queue.Queue.State.RUNNING,) + ) + + response = await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + + assert response.name == "name_value" + + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_update_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + request.queue.name = "queue.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_queue), "__call__") as call: + call.return_value = gct_queue.Queue() + + client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "queue.name=queue.name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + request.queue.name = "queue.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + + await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "queue.name=queue.name/value",) in kw["metadata"] + + +def test_update_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_queue( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].queue == gct_queue.Queue(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_queue( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].queue == gct_queue.Queue(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_queue( + transport: str = "grpc", request_type=cloudtasks.DeleteQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_from_dict(): + test_delete_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = cloudtasks.DeleteQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_queue), "__call__") as call: + call.return_value = None + + client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_queue( + cloudtasks.DeleteQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_queue( + cloudtasks.DeleteQueueRequest(), name="name_value", + ) + + +def test_purge_queue( + transport: str = "grpc", request_type=cloudtasks.PurgeQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + app_engine_http_target=target.AppEngineHttpTarget( + app_engine_routing_override=target.AppEngineRouting( + service="service_value" + ) + ), + ) + + response = client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_purge_queue_from_dict(): + test_purge_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_purge_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.PurgeQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.purge_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue(name="name_value", state=queue.Queue.State.RUNNING,) + ) + + response = await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_purge_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.purge_queue), "__call__") as call: + call.return_value = queue.Queue() + + client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.purge_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + + await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_purge_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.purge_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_purge_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.purge_queue( + cloudtasks.PurgeQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.purge_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.purge_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.purge_queue( + cloudtasks.PurgeQueueRequest(), name="name_value", + ) + + +def test_pause_queue( + transport: str = "grpc", request_type=cloudtasks.PauseQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + app_engine_http_target=target.AppEngineHttpTarget( + app_engine_routing_override=target.AppEngineRouting( + service="service_value" + ) + ), + ) + + response = client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_pause_queue_from_dict(): + test_pause_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_pause_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.PauseQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.pause_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue(name="name_value", state=queue.Queue.State.RUNNING,) + ) + + response = await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_pause_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pause_queue), "__call__") as call: + call.return_value = queue.Queue() + + client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_pause_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.pause_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + + await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_pause_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.pause_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_pause_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_queue( + cloudtasks.PauseQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.pause_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.pause_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.pause_queue( + cloudtasks.PauseQueueRequest(), name="name_value", + ) + + +def test_resume_queue( + transport: str = "grpc", request_type=cloudtasks.ResumeQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + app_engine_http_target=target.AppEngineHttpTarget( + app_engine_routing_override=target.AppEngineRouting( + service="service_value" + ) + ), + ) + + response = client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_resume_queue_from_dict(): + test_resume_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_resume_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.ResumeQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.resume_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue(name="name_value", state=queue.Queue.State.RUNNING,) + ) + + response = await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + +def test_resume_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.resume_queue), "__call__") as call: + call.return_value = queue.Queue() + + client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_resume_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.resume_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + + await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_resume_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.resume_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_resume_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_queue( + cloudtasks.ResumeQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.resume_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.resume_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.resume_queue( + cloudtasks.ResumeQueueRequest(), name="name_value", + ) + + +def test_get_iam_policy( + transport: str = "grpc", request_type=iam_policy.GetIamPolicyRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_from_dict(): + test_get_iam_policy(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = iam_policy.GetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+        call.return_value = policy.Policy()
+
+        client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_field_headers_async():
+    client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy.GetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.get_iam_policy), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
+
+        await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+# Distinct name so this does not shadow test_get_iam_policy_from_dict above.
+def test_get_iam_policy_from_dict_foreign():
+    client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy.Policy()
+
+        response = client.get_iam_policy(
+            request={
+                "resource": "resource_value",
+                "options": options.GetPolicyOptions(requested_policy_version=2598),
+            }
+        )
+        call.assert_called()
+
+
+def test_get_iam_policy_flattened():
+    client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy.Policy()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_iam_policy(resource="resource_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].resource == "resource_value"
+
+
+def test_get_iam_policy_flattened_error():
+    client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_iam_policy(
+            iam_policy.GetIamPolicyRequest(), resource="resource_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_async():
+    client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_iam_policy(resource="resource_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy.GetIamPolicyRequest(), resource="resource_value", + ) + + +def test_set_iam_policy( + transport: str = "grpc", request_type=iam_policy.SetIamPolicyRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_from_dict(): + test_set_iam_policy(request_type=dict) + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, policy.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+    client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_field_headers_async():
+    client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.set_iam_policy), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
+
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+# Distinct name so this does not shadow test_set_iam_policy_from_dict above.
+def test_set_iam_policy_from_dict_foreign():
+    client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy.Policy()
+
+        response = client.set_iam_policy(
+            request={
+                "resource": "resource_value",
+                "policy": policy.Policy(version=774),
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy_flattened():
+    client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy.Policy()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.set_iam_policy(resource="resource_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].resource == "resource_value"
+
+
+def test_set_iam_policy_flattened_error():
+    client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy.SetIamPolicyRequest(), resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy(resource="resource_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy.SetIamPolicyRequest(), resource="resource_value", + ) + + +def test_test_iam_permissions( + transport: str = "grpc", request_type=iam_policy.TestIamPermissionsRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_from_dict(): + test_test_iam_permissions(request_type=dict) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],)
+        )
+
+        response = await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy.TestIamPermissionsResponse)
+
+    assert response.permissions == ["permissions_value"]
+
+
+def test_test_iam_permissions_field_headers():
+    client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy.TestIamPermissionsRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._transport.test_iam_permissions), "__call__"
+    ) as call:
+        call.return_value = iam_policy.TestIamPermissionsResponse()
+
+        client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_field_headers_async():
+    client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy.TestIamPermissionsRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.test_iam_permissions), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            iam_policy.TestIamPermissionsResponse()
+        )
+
+        await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+# Distinct name so this does not shadow test_test_iam_permissions_from_dict above.
+def test_test_iam_permissions_from_dict_foreign():
+    client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._transport.test_iam_permissions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = iam_policy.TestIamPermissionsResponse()
+
+        response = client.test_iam_permissions(
+            request={
+                "resource": "resource_value",
+                "permissions": ["permissions_value"],
+            }
+        )
+        call.assert_called()
+
+
+def test_test_iam_permissions_flattened():
+    client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._transport.test_iam_permissions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = iam_policy.TestIamPermissionsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.test_iam_permissions( + resource="resource_value", permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + assert args[0].permissions == ["permissions_value"] + + +def test_test_iam_permissions_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.test_iam_permissions( + resource="resource_value", permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + assert args[0].permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.test_iam_permissions( + iam_policy.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +def test_list_tasks(transport: str = "grpc", request_type=cloudtasks.ListTasksRequest): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.ListTasksRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTasksPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_tasks_from_dict(): + test_list_tasks(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_tasks_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.ListTasksRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_tasks_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + call.return_value = cloudtasks.ListTasksResponse() + + client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_tasks_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse() + ) + + await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_tasks_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloudtasks.ListTasksResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tasks(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_tasks_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tasks( + cloudtasks.ListTasksRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_tasks(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_tasks( + cloudtasks.ListTasksRequest(), parent="parent_value", + ) + + +def test_list_tasks_pager(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc", + ), + cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",), + cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",), + cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_tasks(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, task.Task) for i in results) + + +def test_list_tasks_pages(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc", + ), + cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",), + cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",), + cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],), + RuntimeError, + ) + pages = list(client.list_tasks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tasks_async_pager(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc", + ), + cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",), + cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",), + cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],), + RuntimeError, + ) + async_pager = await client.list_tasks(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, task.Task) for i in responses) + + +@pytest.mark.asyncio +async def test_list_tasks_async_pages(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc", + ), + cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",), + cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",), + cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_tasks(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_task(transport: str = "grpc", request_type=cloudtasks.GetTaskRequest): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest( + http_method=target.HttpMethod.POST + ), + ) + + response = client.get_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.view == task.Task.View.BASIC + + +def test_get_task_from_dict(): + test_get_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.GetTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task(name="name_value", view=task.Task.View.BASIC,) + ) + + response = await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.view == task.Task.View.BASIC + + +def test_get_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_task), "__call__") as call: + call.return_value = task.Task() + + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + + await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client._transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_task( + cloudtasks.GetTaskRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_task( + cloudtasks.GetTaskRequest(), name="name_value", + ) + + +def test_create_task( + transport: str = "grpc", request_type=cloudtasks.CreateTaskRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task( + name="name_value", + view=gct_task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest( + http_method=target.HttpMethod.POST + ), + ) + + response = client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_task.Task) + + assert response.name == "name_value" + + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_from_dict(): + test_create_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.CreateTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_task.Task(name="name_value", view=gct_task.Task.View.BASIC,) + ) + + response = await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_task.Task) + + assert response.name == "name_value" + + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_task), "__call__") as call: + call.return_value = gct_task.Task() + + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gct_task.Task() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_task( + parent="parent_value", task=gct_task.Task(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].task == gct_task.Task(name="name_value") + + +def test_create_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_task( + parent="parent_value", task=gct_task.Task(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].task == gct_task.Task(name="name_value") + + +@pytest.mark.asyncio +async def test_create_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +def test_delete_task( + transport: str = "grpc", request_type=cloudtasks.DeleteTaskRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. 
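The create_task tests drive the flattened (parent, task) signature, and their fake return values show the target types shipped with this version, such as AppEngineHttpRequest and HttpMethod. A hedged sketch that builds a task from those same types; the queue name is a placeholder and only fields that appear in the tests are set, leaving the service to fill in defaults (including the task name).

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksClient
from google.cloud.tasks_v2beta2.types import target
from google.cloud.tasks_v2beta2.types import task as gct_task

client = CloudTasksClient()

parent = "projects/my-project/locations/us-central1/queues/my-queue"

# A Task with an App Engine HTTP target, mirroring the objects the tests
# construct as mocked return values.
new_task = gct_task.Task(
    app_engine_http_request=target.AppEngineHttpRequest(
        http_method=target.HttpMethod.POST,
    ),
)

created = client.create_task(parent=parent, task=new_task)
print(created.name)  # the service assigns a task name when none is supplied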
+ assert response is None + + +def test_delete_task_from_dict(): + test_delete_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.DeleteTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_task), "__call__") as call: + call.return_value = None + + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_task( + cloudtasks.DeleteTaskRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_task( + cloudtasks.DeleteTaskRequest(), name="name_value", + ) + + +def test_lease_tasks( + transport: str = "grpc", request_type=cloudtasks.LeaseTasksRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.lease_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.LeaseTasksResponse() + + response = client.lease_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.LeaseTasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudtasks.LeaseTasksResponse) + + +def test_lease_tasks_from_dict(): + test_lease_tasks(request_type=dict) + + +@pytest.mark.asyncio +async def test_lease_tasks_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.LeaseTasksRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.lease_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.LeaseTasksResponse() + ) + + response = await client.lease_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudtasks.LeaseTasksResponse) + + +def test_lease_tasks_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.LeaseTasksRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.lease_tasks), "__call__") as call: + call.return_value = cloudtasks.LeaseTasksResponse() + + client.lease_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_lease_tasks_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.LeaseTasksRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.lease_tasks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.LeaseTasksResponse() + ) + + await client.lease_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_lease_tasks_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.lease_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.LeaseTasksResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.lease_tasks( + parent="parent_value", lease_duration=duration.Duration(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert DurationRule().to_proto(args[0].lease_duration) == duration.Duration( + seconds=751 + ) + + +def test_lease_tasks_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.lease_tasks( + cloudtasks.LeaseTasksRequest(), + parent="parent_value", + lease_duration=duration.Duration(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_lease_tasks_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.lease_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.LeaseTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.LeaseTasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.lease_tasks( + parent="parent_value", lease_duration=duration.Duration(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert DurationRule().to_proto(args[0].lease_duration) == duration.Duration( + seconds=751 + ) + + +@pytest.mark.asyncio +async def test_lease_tasks_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.lease_tasks( + cloudtasks.LeaseTasksRequest(), + parent="parent_value", + lease_duration=duration.Duration(seconds=751), + ) + + +def test_acknowledge_task( + transport: str = "grpc", request_type=cloudtasks.AcknowledgeTaskRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.acknowledge_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.acknowledge_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.AcknowledgeTaskRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_acknowledge_task_from_dict(): + test_acknowledge_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_acknowledge_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.AcknowledgeTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.acknowledge_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.acknowledge_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_acknowledge_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.AcknowledgeTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.acknowledge_task), "__call__" + ) as call: + call.return_value = None + + client.acknowledge_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_acknowledge_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.AcknowledgeTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.acknowledge_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.acknowledge_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_acknowledge_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.acknowledge_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.acknowledge_task( + name="name_value", schedule_time=timestamp.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp.Timestamp( + seconds=751 + ) + + +def test_acknowledge_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.acknowledge_task( + cloudtasks.AcknowledgeTaskRequest(), + name="name_value", + schedule_time=timestamp.Timestamp(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_acknowledge_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.acknowledge_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.acknowledge_task( + name="name_value", schedule_time=timestamp.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp.Timestamp( + seconds=751 + ) + + +@pytest.mark.asyncio +async def test_acknowledge_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.acknowledge_task( + cloudtasks.AcknowledgeTaskRequest(), + name="name_value", + schedule_time=timestamp.Timestamp(seconds=751), + ) + + +def test_renew_lease( + transport: str = "grpc", request_type=cloudtasks.RenewLeaseRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.renew_lease), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest( + http_method=target.HttpMethod.POST + ), + ) + + response = client.renew_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.RenewLeaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.view == task.Task.View.BASIC + + +def test_renew_lease_from_dict(): + test_renew_lease(request_type=dict) + + +@pytest.mark.asyncio +async def test_renew_lease_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.RenewLeaseRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.renew_lease), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task(name="name_value", view=task.Task.View.BASIC,) + ) + + response = await client.renew_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.view == task.Task.View.BASIC + + +def test_renew_lease_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RenewLeaseRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.renew_lease), "__call__") as call: + call.return_value = task.Task() + + client.renew_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_renew_lease_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RenewLeaseRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.renew_lease), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + + await client.renew_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_renew_lease_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.renew_lease), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.renew_lease( + name="name_value", + schedule_time=timestamp.Timestamp(seconds=751), + lease_duration=duration.Duration(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp.Timestamp( + seconds=751 + ) + + assert DurationRule().to_proto(args[0].lease_duration) == duration.Duration( + seconds=751 + ) + + +def test_renew_lease_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.renew_lease( + cloudtasks.RenewLeaseRequest(), + name="name_value", + schedule_time=timestamp.Timestamp(seconds=751), + lease_duration=duration.Duration(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_renew_lease_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.renew_lease), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.renew_lease( + name="name_value", + schedule_time=timestamp.Timestamp(seconds=751), + lease_duration=duration.Duration(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp.Timestamp( + seconds=751 + ) + + assert DurationRule().to_proto(args[0].lease_duration) == duration.Duration( + seconds=751 + ) + + +@pytest.mark.asyncio +async def test_renew_lease_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.renew_lease( + cloudtasks.RenewLeaseRequest(), + name="name_value", + schedule_time=timestamp.Timestamp(seconds=751), + lease_duration=duration.Duration(seconds=751), + ) + + +def test_cancel_lease( + transport: str = "grpc", request_type=cloudtasks.CancelLeaseRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.cancel_lease), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest( + http_method=target.HttpMethod.POST + ), + ) + + response = client.cancel_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.CancelLeaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.view == task.Task.View.BASIC + + +def test_cancel_lease_from_dict(): + test_cancel_lease(request_type=dict) + + +@pytest.mark.asyncio +async def test_cancel_lease_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
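Between them, the lease_tasks, acknowledge_task, renew_lease, and cancel_lease tests in this file trace the v2beta2 pull-queue lifecycle. A hedged end-to-end sketch using only the flattened parameters those tests pass; the queue name and lease length are placeholders.

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksClient
from google.protobuf import duration_pb2 as duration

client = CloudTasksClient()
parent = "projects/my-project/locations/us-central1/queues/my-pull-queue"

# Lease a batch of tasks for 30 seconds; test_lease_tasks_flattened passes
# parent and lease_duration in exactly this shape.
response = client.lease_tasks(
    parent=parent, lease_duration=duration.Duration(seconds=30),
)

for leased in response.tasks:
    try:
        # ... do the work the task represents, then acknowledge it with the
        # schedule_time returned by the lease (see test_acknowledge_task_flattened).
        client.acknowledge_task(name=leased.name, schedule_time=leased.schedule_time)
    except Exception:
        # Return the lease early so another worker can pick the task up.
        client.cancel_lease(name=leased.name, schedule_time=leased.schedule_time)

# A slow worker could instead call renew_lease with name, schedule_time, and a
# fresh lease_duration, matching test_renew_lease_flattened.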
+ request = cloudtasks.CancelLeaseRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.cancel_lease), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task(name="name_value", view=task.Task.View.BASIC,) + ) + + response = await client.cancel_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.view == task.Task.View.BASIC + + +def test_cancel_lease_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CancelLeaseRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.cancel_lease), "__call__") as call: + call.return_value = task.Task() + + client.cancel_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_lease_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CancelLeaseRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.cancel_lease), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + + await client.cancel_lease(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_cancel_lease_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.cancel_lease), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_lease( + name="name_value", schedule_time=timestamp.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp.Timestamp( + seconds=751 + ) + + +def test_cancel_lease_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_lease( + cloudtasks.CancelLeaseRequest(), + name="name_value", + schedule_time=timestamp.Timestamp(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_cancel_lease_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.cancel_lease), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.cancel_lease( + name="name_value", schedule_time=timestamp.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp.Timestamp( + seconds=751 + ) + + +@pytest.mark.asyncio +async def test_cancel_lease_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.cancel_lease( + cloudtasks.CancelLeaseRequest(), + name="name_value", + schedule_time=timestamp.Timestamp(seconds=751), + ) + + +def test_run_task(transport: str = "grpc", request_type=cloudtasks.RunTaskRequest): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest( + http_method=target.HttpMethod.POST + ), + ) + + response = client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.view == task.Task.View.BASIC + + +def test_run_task_from_dict(): + test_run_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_run_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.RunTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.run_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task(name="name_value", view=task.Task.View.BASIC,) + ) + + response = await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.view == task.Task.View.BASIC + + +def test_run_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.run_task), "__call__") as call: + call.return_value = task.Task() + + client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_run_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.run_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + + await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_run_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = task.Task() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_run_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_task( + cloudtasks.RunTaskRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_run_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.run_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.run_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_run_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.run_task( + cloudtasks.RunTaskRequest(), name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = CloudTasksClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
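Every RPC in this file is covered twice, once through CloudTasksClient and once through CloudTasksAsyncClient over the grpc_asyncio transport. A minimal sketch of the asynchronous surface, assuming Application Default Credentials and a placeholder task name.

import asyncio

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksAsyncClient


async def main() -> None:
    client = CloudTasksAsyncClient()
    fetched = await client.get_task(
        name="projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task"
    )
    print(fetched.name, fetched.view)


asyncio.run(main())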
+ transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudTasksGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.CloudTasksGrpcTransport,) + + +def test_cloud_tasks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.CloudTasksTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_tasks_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudTasksTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_queues", + "get_queue", + "create_queue", + "update_queue", + "delete_queue", + "purge_queue", + "pause_queue", + "resume_queue", + "get_iam_policy", + "set_iam_policy", + "test_iam_permissions", + "list_tasks", + "get_task", + "create_task", + "delete_task", + "lease_tasks", + "acknowledge_task", + "renew_lease", + "cancel_lease", + "run_task", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_cloud_tasks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_tasks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + CloudTasksClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_cloud_tasks_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
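The transport tests directly above pin down how credentials can be supplied: Application Default Credentials when nothing is passed, a credentials file, or a pre-built transport object, and they assert that combining a transport instance with credentials, a credentials file, or scopes is rejected. A hedged configuration sketch; the key file path is a placeholder.

from google.auth import credentials as ga_credentials
from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksClient, transports

# 1. No arguments: ADC with the cloud-platform scope, as test_cloud_tasks_auth_adc asserts.
client = CloudTasksClient()

# 2. A service account key file routed through client_options.
client = CloudTasksClient(client_options={"credentials_file": "/path/to/key.json"})

# 3. A pre-built transport; credentials then belong to the transport. (The unit
#    tests use AnonymousCredentials here; real code would pass real credentials.)
transport = transports.CloudTasksGrpcTransport(
    credentials=ga_credentials.AnonymousCredentials(),
)
client = CloudTasksClient(transport=transport)

# Passing a transport together with credentials, credentials_file, or scopes
# raises ValueError, as test_credentials_transport_error verifies.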
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_tasks_host_no_port(): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudtasks.googleapis.com" + ), + ) + assert client._transport._host == "cloudtasks.googleapis.com:443" + + +def test_cloud_tasks_host_with_port(): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudtasks.googleapis.com:8000" + ), + ) + assert client._transport._host == "cloudtasks.googleapis.com:8000" + + +def test_cloud_tasks_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +def test_cloud_tasks_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.CloudTasksGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_cloud_tasks_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
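test_cloud_tasks_host_no_port and test_cloud_tasks_host_with_port show that the API endpoint is overridden through ClientOptions and that :443 is appended when no port is given. A small sketch; the alternate port is purely illustrative.

from google.api_core import client_options
from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksClient

options = client_options.ClientOptions(api_endpoint="cloudtasks.googleapis.com:8000")
client = CloudTasksClient(client_options=options)

# With no explicit port, the client targets cloudtasks.googleapis.com:443.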
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_cloud_tasks_grpc_asyncio_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. + mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.CloudTasksGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_cloud_tasks_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
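The mTLS tests construct the gRPC transport directly, passing api_mtls_endpoint plus a client_cert_source callback that returns the certificate and key bytes (the generated test modules define client_cert_source_callback with exactly that shape). A hedged sketch of the same wiring outside the test harness; the certificate and key paths are placeholders, and the endpoint simply follows the <service>.mtls.googleapis.com pattern.

from google import auth
from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksClient, transports


def my_client_cert_source():
    # Must return (certificate_bytes, key_bytes), matching the callback the tests use.
    with open("client.crt", "rb") as crt, open("client.key", "rb") as key:
        return crt.read(), key.read()


creds, _ = auth.default()
transport = transports.CloudTasksGrpcTransport(
    credentials=creds,
    api_mtls_endpoint="cloudtasks.mtls.googleapis.com",  # assumed mTLS endpoint
    client_cert_source=my_client_cert_source,
)
client = CloudTasksClient(transport=transport)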
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_cloud_tasks_grpc_asyncio_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.CloudTasksGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_task_path(): + project = "squid" + location = "clam" + queue = "whelk" + task = "octopus" + + expected = "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format( + project=project, location=location, queue=queue, task=task, + ) + actual = CloudTasksClient.task_path(project, location, queue, task) + assert expected == actual + + +def test_parse_task_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "queue": "cuttlefish", + "task": "mussel", + } + path = CloudTasksClient.task_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_task_path(path) + assert expected == actual + + +def test_queue_path(): + project = "squid" + location = "clam" + queue = "whelk" + + expected = "projects/{project}/locations/{location}/queues/{queue}".format( + project=project, location=location, queue=queue, + ) + actual = CloudTasksClient.queue_path(project, location, queue) + assert expected == actual + + +def test_parse_queue_path(): + expected = { + "project": "octopus", + "location": "oyster", + "queue": "nudibranch", + } + path = CloudTasksClient.queue_path(**expected) + + # Check that the path construction is reversible. 
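The *_path and parse_*_path tests near the end of this file cover the classmethod helpers for building and splitting resource names. A short sketch; the IDs are placeholders.

from google.cloud.tasks_v2beta2.services.cloud_tasks import CloudTasksClient

queue_name = CloudTasksClient.queue_path("my-project", "us-central1", "my-queue")
# -> "projects/my-project/locations/us-central1/queues/my-queue"

task_name = CloudTasksClient.task_path("my-project", "us-central1", "my-queue", "my-task")

# parse_task_path reverses the construction into its components.
parts = CloudTasksClient.parse_task_path(task_name)
assert parts["queue"] == "my-queue" and parts["task"] == "my-task"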
+ actual = CloudTasksClient.parse_queue_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudTasksTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudTasksTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudTasksClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/tasks_v2beta3/__init__.py b/tests/unit/gapic/tasks_v2beta3/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/tests/unit/gapic/tasks_v2beta3/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py b/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py new file mode 100644 index 00000000..933dba70 --- /dev/null +++ b/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py @@ -0,0 +1,4283 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksAsyncClient +from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksClient +from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta3.services.cloud_tasks import transports +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import target +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import options_pb2 as options # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 as any # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore +from google.type import expr_pb2 as expr # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudTasksClient._get_default_mtls_endpoint(None) is None + assert ( + CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient]) +def test_cloud_tasks_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "cloudtasks.googleapis.com:443" + + +def test_cloud_tasks_client_get_transport_class(): + transport = CloudTasksClient.get_transport_class() + assert transport == transports.CloudTasksGrpcTransport + + transport = CloudTasksClient.get_transport_class("grpc") + assert transport == transports.CloudTasksGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient) +) +@mock.patch.object( + CloudTasksAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudTasksAsyncClient), +) +def test_cloud_tasks_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_tasks_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_tasks_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_cloud_tasks_client_client_options_from_dict(): + with mock.patch( + "google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudTasksClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_queues( + transport: str = "grpc", request_type=cloudtasks.ListQueuesRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_queues_from_dict(): + test_list_queues(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_queues_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.ListQueuesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListQueuesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_queues_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + call.return_value = cloudtasks.ListQueuesResponse() + + client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_queues_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse() + ) + + await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_queues_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_queues(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_queues_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_queues( + cloudtasks.ListQueuesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_queues_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_queues), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+        call.return_value = cloudtasks.ListQueuesResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudtasks.ListQueuesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_queues(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_queues_flattened_error_async():
+    client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_queues(
+            cloudtasks.ListQueuesRequest(), parent="parent_value",
+        )
+
+
+def test_list_queues_pager():
+    client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_queues), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListQueuesResponse(
+                queues=[queue.Queue(), queue.Queue(), queue.Queue(),],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",),
+            cloudtasks.ListQueuesResponse(
+                queues=[queue.Queue(),], next_page_token="ghi",
+            ),
+            cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_queues(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, queue.Queue) for i in results)
+
+
+def test_list_queues_pages():
+    client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_queues), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListQueuesResponse(
+                queues=[queue.Queue(), queue.Queue(), queue.Queue(),],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",),
+            cloudtasks.ListQueuesResponse(
+                queues=[queue.Queue(),], next_page_token="ghi",
+            ),
+            cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],),
+            RuntimeError,
+        )
+        pages = list(client.list_queues(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_queues_async_pager():
+    client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_queues),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListQueuesResponse(
+                queues=[queue.Queue(), queue.Queue(), queue.Queue(),],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",),
+            cloudtasks.ListQueuesResponse(
+                queues=[queue.Queue(),], next_page_token="ghi",
+            ),
+            cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],),
+            RuntimeError,
+        )
+        async_pager = await client.list_queues(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, queue.Queue) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_queues_async_pages():
+    client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_queues),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListQueuesResponse(
+                queues=[queue.Queue(), queue.Queue(), queue.Queue(),],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",),
+            cloudtasks.ListQueuesResponse(
+                queues=[queue.Queue(),], next_page_token="ghi",
+            ),
+            cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_queues(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_queue(transport: str = "grpc", request_type=cloudtasks.GetQueueRequest):
+    client = CloudTasksClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.get_queue), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = queue.Queue(
+            name="name_value",
+            state=queue.Queue.State.RUNNING,
+            type=queue.Queue.Type.PULL,
+            app_engine_http_queue=target.AppEngineHttpQueue(
+                app_engine_routing_override=target.AppEngineRouting(
+                    service="service_value"
+                )
+            ),
+        )
+
+        response = client.get_queue(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == cloudtasks.GetQueueRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, queue.Queue)
+
+    assert response.name == "name_value"
+
+    assert response.state == queue.Queue.State.RUNNING
+
+    assert response.type == queue.Queue.Type.PULL
+
+
+def test_get_queue_from_dict():
+    test_get_queue(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_get_queue_async(transport: str = "grpc_asyncio"):
+    client = CloudTasksAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = cloudtasks.GetQueueRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client._client._transport.get_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type=queue.Queue.Type.PULL, + ) + ) + + response = await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + assert response.type == queue.Queue.Type.PULL + + +def test_get_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_queue), "__call__") as call: + call.return_value = queue.Queue() + + client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + + await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_queue( + cloudtasks.GetQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_queue( + cloudtasks.GetQueueRequest(), name="name_value", + ) + + +def test_create_queue( + transport: str = "grpc", request_type=cloudtasks.CreateQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + type=gct_queue.Queue.Type.PULL, + app_engine_http_queue=target.AppEngineHttpQueue( + app_engine_routing_override=target.AppEngineRouting( + service="service_value" + ) + ), + ) + + response = client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + + assert response.name == "name_value" + + assert response.state == gct_queue.Queue.State.RUNNING + + assert response.type == gct_queue.Queue.Type.PULL + + +def test_create_queue_from_dict(): + test_create_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.CreateQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + type=gct_queue.Queue.Type.PULL, + ) + ) + + response = await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + + assert response.name == "name_value" + + assert response.state == gct_queue.Queue.State.RUNNING + + assert response.type == gct_queue.Queue.Type.PULL + + +def test_create_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_queue), "__call__") as call: + call.return_value = gct_queue.Queue() + + client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + + await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_queue( + parent="parent_value", queue=gct_queue.Queue(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].queue == gct_queue.Queue(name="name_value") + + +def test_create_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_queue( + parent="parent_value", queue=gct_queue.Queue(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].queue == gct_queue.Queue(name="name_value") + + +@pytest.mark.asyncio +async def test_create_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +def test_update_queue( + transport: str = "grpc", request_type=cloudtasks.UpdateQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + type=gct_queue.Queue.Type.PULL, + app_engine_http_queue=target.AppEngineHttpQueue( + app_engine_routing_override=target.AppEngineRouting( + service="service_value" + ) + ), + ) + + response = client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + + assert response.name == "name_value" + + assert response.state == gct_queue.Queue.State.RUNNING + + assert response.type == gct_queue.Queue.Type.PULL + + +def test_update_queue_from_dict(): + test_update_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.UpdateQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.update_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + type=gct_queue.Queue.Type.PULL, + ) + ) + + response = await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + + assert response.name == "name_value" + + assert response.state == gct_queue.Queue.State.RUNNING + + assert response.type == gct_queue.Queue.Type.PULL + + +def test_update_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + request.queue.name = "queue.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_queue), "__call__") as call: + call.return_value = gct_queue.Queue() + + client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "queue.name=queue.name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + request.queue.name = "queue.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + + await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "queue.name=queue.name/value",) in kw["metadata"] + + +def test_update_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_queue( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].queue == gct_queue.Queue(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_queue( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].queue == gct_queue.Queue(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_queue( + transport: str = "grpc", request_type=cloudtasks.DeleteQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_from_dict(): + test_delete_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = cloudtasks.DeleteQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_queue), "__call__") as call: + call.return_value = None + + client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_queue( + cloudtasks.DeleteQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_queue( + cloudtasks.DeleteQueueRequest(), name="name_value", + ) + + +def test_purge_queue( + transport: str = "grpc", request_type=cloudtasks.PurgeQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type=queue.Queue.Type.PULL, + app_engine_http_queue=target.AppEngineHttpQueue( + app_engine_routing_override=target.AppEngineRouting( + service="service_value" + ) + ), + ) + + response = client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + assert response.type == queue.Queue.Type.PULL + + +def test_purge_queue_from_dict(): + test_purge_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_purge_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.PurgeQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.purge_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type=queue.Queue.Type.PULL, + ) + ) + + response = await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + assert response.type == queue.Queue.Type.PULL + + +def test_purge_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.purge_queue), "__call__") as call: + call.return_value = queue.Queue() + + client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.purge_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + + await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_purge_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.purge_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_purge_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.purge_queue( + cloudtasks.PurgeQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.purge_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.purge_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.purge_queue( + cloudtasks.PurgeQueueRequest(), name="name_value", + ) + + +def test_pause_queue( + transport: str = "grpc", request_type=cloudtasks.PauseQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type=queue.Queue.Type.PULL, + app_engine_http_queue=target.AppEngineHttpQueue( + app_engine_routing_override=target.AppEngineRouting( + service="service_value" + ) + ), + ) + + response = client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + assert response.type == queue.Queue.Type.PULL + + +def test_pause_queue_from_dict(): + test_pause_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_pause_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.PauseQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.pause_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type=queue.Queue.Type.PULL, + ) + ) + + response = await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + assert response.type == queue.Queue.Type.PULL + + +def test_pause_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pause_queue), "__call__") as call: + call.return_value = queue.Queue() + + client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_pause_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.pause_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + + await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_pause_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.pause_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_pause_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.pause_queue( + cloudtasks.PauseQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.pause_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.pause_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.pause_queue( + cloudtasks.PauseQueueRequest(), name="name_value", + ) + + +def test_resume_queue( + transport: str = "grpc", request_type=cloudtasks.ResumeQueueRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type=queue.Queue.Type.PULL, + app_engine_http_queue=target.AppEngineHttpQueue( + app_engine_routing_override=target.AppEngineRouting( + service="service_value" + ) + ), + ) + + response = client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + assert response.type == queue.Queue.Type.PULL + + +def test_resume_queue_from_dict(): + test_resume_queue(request_type=dict) + + +@pytest.mark.asyncio +async def test_resume_queue_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.ResumeQueueRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.resume_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type=queue.Queue.Type.PULL, + ) + ) + + response = await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + + assert response.name == "name_value" + + assert response.state == queue.Queue.State.RUNNING + + assert response.type == queue.Queue.Type.PULL + + +def test_resume_queue_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.resume_queue), "__call__") as call: + call.return_value = queue.Queue() + + client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_resume_queue_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.resume_queue), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + + await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_resume_queue_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.resume_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_resume_queue_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.resume_queue( + cloudtasks.ResumeQueueRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.resume_queue), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.resume_queue(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.resume_queue( + cloudtasks.ResumeQueueRequest(), name="name_value", + ) + + +def test_get_iam_policy( + transport: str = "grpc", request_type=iam_policy.GetIamPolicyRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_from_dict(): + test_get_iam_policy(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy.Policy)
+
+ assert response.version == 774
+
+ assert response.etag == b"etag_blob"
+
+
+def test_get_iam_policy_field_headers():
+ client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy.GetIamPolicyRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ call.return_value = policy.Policy()
+
+ client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_field_headers_async():
+ client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy.GetIamPolicyRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.get_iam_policy), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
+
+ await client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+def test_get_iam_policy_from_dict_foreign():
+ client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy.Policy()
+
+ response = client.get_iam_policy(
+ request={
+ "resource": "resource_value",
+ "options": options.GetPolicyOptions(requested_policy_version=2598),
+ }
+ )
+ call.assert_called()
+
+
+def test_get_iam_policy_flattened():
+ client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy.Policy()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_iam_policy(resource="resource_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].resource == "resource_value"
+
+
+def test_get_iam_policy_flattened_error():
+ client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy.GetIamPolicyRequest(), resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_iam_policy(resource="resource_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy.GetIamPolicyRequest(), resource="resource_value", + ) + + +def test_set_iam_policy( + transport: str = "grpc", request_type=iam_policy.SetIamPolicyRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_from_dict(): + test_set_iam_policy(request_type=dict) + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy.Policy)
+
+ assert response.version == 774
+
+ assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+ client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy.SetIamPolicyRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ call.return_value = policy.Policy()
+
+ client.set_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_field_headers_async():
+ client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy.SetIamPolicyRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.set_iam_policy), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
+
+ await client.set_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+def test_set_iam_policy_from_dict_foreign():
+ client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy.Policy()
+
+ response = client.set_iam_policy(
+ request={
+ "resource": "resource_value",
+ "policy": policy.Policy(version=774),
+ }
+ )
+ call.assert_called()
+
+
+def test_set_iam_policy_flattened():
+ client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy.Policy()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.set_iam_policy(resource="resource_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + +def test_set_iam_policy_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy.SetIamPolicyRequest(), resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy(resource="resource_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy.SetIamPolicyRequest(), resource="resource_value", + ) + + +def test_test_iam_permissions( + transport: str = "grpc", request_type=iam_policy.TestIamPermissionsRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_from_dict(): + test_test_iam_permissions(request_type=dict) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client._client._transport.test_iam_permissions), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],)
+ )
+
+ response = await client.test_iam_permissions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, iam_policy.TestIamPermissionsResponse)
+
+ assert response.permissions == ["permissions_value"]
+
+
+def test_test_iam_permissions_field_headers():
+ client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy.TestIamPermissionsRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.test_iam_permissions), "__call__"
+ ) as call:
+ call.return_value = iam_policy.TestIamPermissionsResponse()
+
+ client.test_iam_permissions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_field_headers_async():
+ client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy.TestIamPermissionsRequest()
+ request.resource = "resource/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.test_iam_permissions), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ iam_policy.TestIamPermissionsResponse()
+ )
+
+ await client.test_iam_permissions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
+def test_test_iam_permissions_from_dict_foreign():
+ client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.test_iam_permissions), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = iam_policy.TestIamPermissionsResponse()
+
+ response = client.test_iam_permissions(
+ request={
+ "resource": "resource_value",
+ "permissions": ["permissions_value"],
+ }
+ )
+ call.assert_called()
+
+
+def test_test_iam_permissions_flattened():
+ client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.test_iam_permissions( + resource="resource_value", permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + assert args[0].permissions == ["permissions_value"] + + +def test_test_iam_permissions_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.test_iam_permissions( + resource="resource_value", permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].resource == "resource_value" + + assert args[0].permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.test_iam_permissions( + iam_policy.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +def test_list_tasks(transport: str = "grpc", request_type=cloudtasks.ListTasksRequest): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.ListTasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_tasks_from_dict(): + test_list_tasks(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_tasks_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.ListTasksRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_tasks_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + call.return_value = cloudtasks.ListTasksResponse() + + client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_tasks_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse() + ) + + await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_tasks_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tasks(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_tasks_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tasks( + cloudtasks.ListTasksRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_tasks(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_tasks( + cloudtasks.ListTasksRequest(), parent="parent_value", + ) + + +def test_list_tasks_pager(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc", + ), + cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",), + cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",), + cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_tasks(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, task.Task) for i in results) + + +def test_list_tasks_pages(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_tasks), "__call__") as call: + # Set the response to a series of pages. 
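+ # Each stub call consumes the next response in order; the trailing RuntimeError flags any unexpected extra page fetch.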
+ call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc", + ), + cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",), + cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",), + cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],), + RuntimeError, + ) + pages = list(client.list_tasks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tasks_async_pager(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc", + ), + cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",), + cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",), + cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],), + RuntimeError, + ) + async_pager = await client.list_tasks(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, task.Task) for i in responses) + + +@pytest.mark.asyncio +async def test_list_tasks_async_pages(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_tasks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc", + ), + cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",), + cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",), + cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_tasks(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_task(transport: str = "grpc", request_type=cloudtasks.GetTaskRequest): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest( + http_method=target.HttpMethod.POST + ), + ) + + response = client.get_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.dispatch_count == 1496 + + assert response.response_count == 1527 + + assert response.view == task.Task.View.BASIC + + +def test_get_task_from_dict(): + test_get_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.GetTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + ) + + response = await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.dispatch_count == 1496 + + assert response.response_count == 1527 + + assert response.view == task.Task.View.BASIC + + +def test_get_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_task), "__call__") as call: + call.return_value = task.Task() + + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + + await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_task( + cloudtasks.GetTaskRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_task( + cloudtasks.GetTaskRequest(), name="name_value", + ) + + +def test_create_task( + transport: str = "grpc", request_type=cloudtasks.CreateTaskRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest( + http_method=target.HttpMethod.POST + ), + ) + + response = client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_task.Task) + + assert response.name == "name_value" + + assert response.dispatch_count == 1496 + + assert response.response_count == 1527 + + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_from_dict(): + test_create_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.CreateTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + ) + ) + + response = await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_task.Task) + + assert response.name == "name_value" + + assert response.dispatch_count == 1496 + + assert response.response_count == 1527 + + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_task), "__call__") as call: + call.return_value = gct_task.Task() + + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_task( + parent="parent_value", task=gct_task.Task(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].task == gct_task.Task(name="name_value") + + +def test_create_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_task( + parent="parent_value", task=gct_task.Task(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].task == gct_task.Task(name="name_value") + + +@pytest.mark.asyncio +async def test_create_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +def test_delete_task( + transport: str = "grpc", request_type=cloudtasks.DeleteTaskRequest +): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_task_from_dict(): + test_delete_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = cloudtasks.DeleteTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_task), "__call__") as call: + call.return_value = None + + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_task( + cloudtasks.DeleteTaskRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_task( + cloudtasks.DeleteTaskRequest(), name="name_value", + ) + + +def test_run_task(transport: str = "grpc", request_type=cloudtasks.RunTaskRequest): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + app_engine_http_request=target.AppEngineHttpRequest( + http_method=target.HttpMethod.POST + ), + ) + + response = client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.dispatch_count == 1496 + + assert response.response_count == 1527 + + assert response.view == task.Task.View.BASIC + + +def test_run_task_from_dict(): + test_run_task(request_type=dict) + + +@pytest.mark.asyncio +async def test_run_task_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = cloudtasks.RunTaskRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.run_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + ) + + response = await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + + assert response.name == "name_value" + + assert response.dispatch_count == 1496 + + assert response.response_count == 1527 + + assert response.view == task.Task.View.BASIC + + +def test_run_task_field_headers(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.run_task), "__call__") as call: + call.return_value = task.Task() + + client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_run_task_field_headers_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.run_task), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + + await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_run_task_flattened(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_run_task_flattened_error(): + client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_task( + cloudtasks.RunTaskRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_run_task_flattened_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.run_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.run_task(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_run_task_flattened_error_async(): + client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.run_task( + cloudtasks.RunTaskRequest(), name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = CloudTasksClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudTasksGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.CloudTasksGrpcTransport,) + + +def test_cloud_tasks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.CloudTasksTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_tasks_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudTasksTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_queues", + "get_queue", + "create_queue", + "update_queue", + "delete_queue", + "purge_queue", + "pause_queue", + "resume_queue", + "get_iam_policy", + "set_iam_policy", + "test_iam_permissions", + "list_tasks", + "get_task", + "create_task", + "delete_task", + "run_task", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_cloud_tasks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_tasks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + CloudTasksClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_cloud_tasks_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_tasks_host_no_port(): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudtasks.googleapis.com" + ), + ) + assert client._transport._host == "cloudtasks.googleapis.com:443" + + +def test_cloud_tasks_host_with_port(): + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudtasks.googleapis.com:8000" + ), + ) + assert client._transport._host == "cloudtasks.googleapis.com:8000" + + +def test_cloud_tasks_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +def test_cloud_tasks_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.CloudTasksGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_cloud_tasks_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_cloud_tasks_grpc_asyncio_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. + mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.CloudTasksGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_cloud_tasks_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.CloudTasksGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_cloud_tasks_grpc_asyncio_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.CloudTasksGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_task_path(): + project = "squid" + location = "clam" + queue = "whelk" + task = "octopus" + + expected = "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format( + project=project, location=location, queue=queue, task=task, + ) + actual = CloudTasksClient.task_path(project, location, queue, task) + assert expected == actual + + +def test_parse_task_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "queue": "cuttlefish", + "task": "mussel", + } + path = CloudTasksClient.task_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_task_path(path) + assert expected == actual + + +def test_queue_path(): + project = "squid" + location = "clam" + queue = "whelk" + + expected = "projects/{project}/locations/{location}/queues/{queue}".format( + project=project, location=location, queue=queue, + ) + actual = CloudTasksClient.queue_path(project, location, queue) + assert expected == actual + + +def test_parse_queue_path(): + expected = { + "project": "octopus", + "location": "oyster", + "queue": "nudibranch", + } + path = CloudTasksClient.queue_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudTasksClient.parse_queue_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudTasksTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudTasksClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudTasksTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudTasksClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/v2/test_cloud_tasks_client_v2.py b/tests/unit/gapic/v2/test_cloud_tasks_client_v2.py deleted file mode 100644 index cf839628..00000000 --- a/tests/unit/gapic/v2/test_cloud_tasks_client_v2.py +++ /dev/null @@ -1,703 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import tasks_v2 -from google.cloud.tasks_v2.proto import cloudtasks_pb2 -from google.cloud.tasks_v2.proto import queue_pb2 -from google.cloud.tasks_v2.proto import task_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestCloudTasksClient(object): - def test_list_queues(self): - # Setup Expected Response - next_page_token = "" - queues_element = {} - queues = [queues_element] - expected_response = {"next_page_token": next_page_token, "queues": queues} - expected_response = cloudtasks_pb2.ListQueuesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - parent = client.location_path("[PROJECT]", "[LOCATION]") - - paged_list_response = 
client.list_queues(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.queues[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.ListQueuesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_queues_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - parent = client.location_path("[PROJECT]", "[LOCATION]") - - paged_list_response = client.list_queues(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_queue(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - response = client.get_queue(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.GetQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.get_queue(name) - - def test_create_queue(self): - # Setup Expected Response - name = "name3373707" - expected_response = {"name": name} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - parent = client.location_path("[PROJECT]", "[LOCATION]") - queue = {} - - response = client.create_queue(parent, queue) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.CreateQueueRequest(parent=parent, queue=queue) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - parent = client.location_path("[PROJECT]", "[LOCATION]") - queue = {} - - with pytest.raises(CustomException): - client.create_queue(parent, queue) - - def test_update_queue(self): - # Setup Expected Response - name = "name3373707" - expected_response = {"name": name} - expected_response = queue_pb2.Queue(**expected_response) - - 
# Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - queue = {} - - response = client.update_queue(queue) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.UpdateQueueRequest(queue=queue) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - queue = {} - - with pytest.raises(CustomException): - client.update_queue(queue) - - def test_delete_queue(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - client.delete_queue(name) - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.DeleteQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.delete_queue(name) - - def test_purge_queue(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - response = client.purge_queue(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.PurgeQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_purge_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.purge_queue(name) - - def test_pause_queue(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - response = client.pause_queue(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.PauseQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_pause_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.pause_queue(name) - - def test_resume_queue(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - response = client.resume_queue(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.ResumeQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_resume_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.resume_queue(name) - - def test_get_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - resource = "resource-341064690" - - response = client.get_iam_policy(resource) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - resource = "resource-341064690" - - with pytest.raises(CustomException): - client.get_iam_policy(resource) - - def test_set_iam_policy(self): - # Setup Expected 
Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - resource = "resource-341064690" - policy = {} - - response = client.set_iam_policy(resource, policy) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_set_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - resource = "resource-341064690" - policy = {} - - with pytest.raises(CustomException): - client.set_iam_policy(resource, policy) - - def test_test_iam_permissions(self): - # Setup Expected Response - expected_response = {} - expected_response = iam_policy_pb2.TestIamPermissionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - resource = "resource-341064690" - permissions = [] - - response = client.test_iam_permissions(resource, permissions) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_test_iam_permissions_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - resource = "resource-341064690" - permissions = [] - - with pytest.raises(CustomException): - client.test_iam_permissions(resource, permissions) - - def test_list_tasks(self): - # Setup Expected Response - next_page_token = "" - tasks_element = {} - tasks = [tasks_element] - expected_response = {"next_page_token": next_page_token, "tasks": tasks} - expected_response = cloudtasks_pb2.ListTasksResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - paged_list_response = client.list_tasks(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.tasks[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.ListTasksRequest(parent=parent) - actual_request = channel.requests[0][1] 
- assert expected_request == actual_request - - def test_list_tasks_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - paged_list_response = client.list_tasks(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_task(self): - # Setup Expected Response - name_2 = "name2-1052831874" - dispatch_count = 1217252086 - response_count = 424727441 - expected_response = { - "name": name_2, - "dispatch_count": dispatch_count, - "response_count": response_count, - } - expected_response = task_pb2.Task(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - response = client.get_task(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.GetTaskRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_task_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - with pytest.raises(CustomException): - client.get_task(name) - - def test_create_task(self): - # Setup Expected Response - name = "name3373707" - dispatch_count = 1217252086 - response_count = 424727441 - expected_response = { - "name": name, - "dispatch_count": dispatch_count, - "response_count": response_count, - } - expected_response = task_pb2.Task(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - task = {} - - response = client.create_task(parent, task) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.CreateTaskRequest(parent=parent, task=task) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_task_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - task = {} - - with pytest.raises(CustomException): - client.create_task(parent, task) - - def test_delete_task(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value 
= channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - client.delete_task(name) - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.DeleteTaskRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_task_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - with pytest.raises(CustomException): - client.delete_task(name) - - def test_run_task(self): - # Setup Expected Response - name_2 = "name2-1052831874" - dispatch_count = 1217252086 - response_count = 424727441 - expected_response = { - "name": name_2, - "dispatch_count": dispatch_count, - "response_count": response_count, - } - expected_response = task_pb2.Task(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup Request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - response = client.run_task(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.RunTaskRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_run_task_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2.CloudTasksClient() - - # Setup request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - with pytest.raises(CustomException): - client.run_task(name) diff --git a/tests/unit/gapic/v2beta2/test_cloud_tasks_client_v2beta2.py b/tests/unit/gapic/v2beta2/test_cloud_tasks_client_v2beta2.py deleted file mode 100644 index dad7c44f..00000000 --- a/tests/unit/gapic/v2beta2/test_cloud_tasks_client_v2beta2.py +++ /dev/null @@ -1,849 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import tasks_v2beta2 -from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2 -from google.cloud.tasks_v2beta2.proto import queue_pb2 -from google.cloud.tasks_v2beta2.proto import task_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import timestamp_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestCloudTasksClient(object): - def test_list_queues(self): - # Setup Expected Response - next_page_token = "" - queues_element = {} - queues = [queues_element] - expected_response = {"next_page_token": next_page_token, "queues": queues} - expected_response = cloudtasks_pb2.ListQueuesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - parent = client.location_path("[PROJECT]", "[LOCATION]") - - paged_list_response = client.list_queues(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.queues[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.ListQueuesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_queues_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - parent = client.location_path("[PROJECT]", "[LOCATION]") - - paged_list_response = client.list_queues(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_queue(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - response = client.get_queue(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = 
cloudtasks_pb2.GetQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.get_queue(name) - - def test_create_queue(self): - # Setup Expected Response - name = "name3373707" - expected_response = {"name": name} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - parent = client.location_path("[PROJECT]", "[LOCATION]") - queue = {} - - response = client.create_queue(parent, queue) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.CreateQueueRequest(parent=parent, queue=queue) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - parent = client.location_path("[PROJECT]", "[LOCATION]") - queue = {} - - with pytest.raises(CustomException): - client.create_queue(parent, queue) - - def test_update_queue(self): - # Setup Expected Response - name = "name3373707" - expected_response = {"name": name} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - queue = {} - - response = client.update_queue(queue) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.UpdateQueueRequest(queue=queue) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - queue = {} - - with pytest.raises(CustomException): - client.update_queue(queue) - - def test_delete_queue(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - client.delete_queue(name) - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.DeleteQueueRequest(name=name) - actual_request = 
channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.delete_queue(name) - - def test_purge_queue(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - response = client.purge_queue(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.PurgeQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_purge_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.purge_queue(name) - - def test_pause_queue(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - response = client.pause_queue(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.PauseQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_pause_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.pause_queue(name) - - def test_resume_queue(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - name = 
client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - response = client.resume_queue(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.ResumeQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_resume_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.resume_queue(name) - - def test_get_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - resource = "resource-341064690" - - response = client.get_iam_policy(resource) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - resource = "resource-341064690" - - with pytest.raises(CustomException): - client.get_iam_policy(resource) - - def test_set_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - resource = "resource-341064690" - policy = {} - - response = client.set_iam_policy(resource, policy) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_set_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - resource = "resource-341064690" - policy = {} - - with pytest.raises(CustomException): - client.set_iam_policy(resource, policy) - - def test_test_iam_permissions(self): - # Setup Expected Response - expected_response = {} - expected_response = iam_policy_pb2.TestIamPermissionsResponse( - 
**expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - resource = "resource-341064690" - permissions = [] - - response = client.test_iam_permissions(resource, permissions) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_test_iam_permissions_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - resource = "resource-341064690" - permissions = [] - - with pytest.raises(CustomException): - client.test_iam_permissions(resource, permissions) - - def test_list_tasks(self): - # Setup Expected Response - next_page_token = "" - tasks_element = {} - tasks = [tasks_element] - expected_response = {"next_page_token": next_page_token, "tasks": tasks} - expected_response = cloudtasks_pb2.ListTasksResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - paged_list_response = client.list_tasks(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.tasks[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.ListTasksRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_tasks_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - paged_list_response = client.list_tasks(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_task(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = task_pb2.Task(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - response = client.get_task(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.GetTaskRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_task_exception(self): - # Mock the API response - 
channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - with pytest.raises(CustomException): - client.get_task(name) - - def test_create_task(self): - # Setup Expected Response - name = "name3373707" - expected_response = {"name": name} - expected_response = task_pb2.Task(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - task = {} - - response = client.create_task(parent, task) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.CreateTaskRequest(parent=parent, task=task) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_task_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - task = {} - - with pytest.raises(CustomException): - client.create_task(parent, task) - - def test_delete_task(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - client.delete_task(name) - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.DeleteTaskRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_task_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - with pytest.raises(CustomException): - client.delete_task(name) - - def test_lease_tasks(self): - # Setup Expected Response - expected_response = {} - expected_response = cloudtasks_pb2.LeaseTasksResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - lease_duration = {} - - response = client.lease_tasks(parent, lease_duration) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.LeaseTasksRequest( - parent=parent, lease_duration=lease_duration - ) - actual_request = 
channel.requests[0][1] - assert expected_request == actual_request - - def test_lease_tasks_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - lease_duration = {} - - with pytest.raises(CustomException): - client.lease_tasks(parent, lease_duration) - - def test_acknowledge_task(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - schedule_time = {} - - client.acknowledge_task(name, schedule_time) - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.AcknowledgeTaskRequest( - name=name, schedule_time=schedule_time - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_acknowledge_task_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - schedule_time = {} - - with pytest.raises(CustomException): - client.acknowledge_task(name, schedule_time) - - def test_renew_lease(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = task_pb2.Task(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - schedule_time = {} - lease_duration = {} - - response = client.renew_lease(name, schedule_time, lease_duration) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.RenewLeaseRequest( - name=name, schedule_time=schedule_time, lease_duration=lease_duration - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_renew_lease_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - schedule_time = {} - lease_duration = {} - - with pytest.raises(CustomException): - client.renew_lease(name, schedule_time, lease_duration) - - def test_cancel_lease(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = task_pb2.Task(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - schedule_time = {} - - response = client.cancel_lease(name, schedule_time) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.CancelLeaseRequest( - name=name, schedule_time=schedule_time - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_cancel_lease_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - schedule_time = {} - - with pytest.raises(CustomException): - client.cancel_lease(name, schedule_time) - - def test_run_task(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = task_pb2.Task(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup Request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - response = client.run_task(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.RunTaskRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_run_task_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta2.CloudTasksClient() - - # Setup request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - with pytest.raises(CustomException): - client.run_task(name) diff --git a/tests/unit/gapic/v2beta3/test_cloud_tasks_client_v2beta3.py b/tests/unit/gapic/v2beta3/test_cloud_tasks_client_v2beta3.py deleted file mode 100644 index 2c55401e..00000000 --- a/tests/unit/gapic/v2beta3/test_cloud_tasks_client_v2beta3.py +++ /dev/null @@ -1,703 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import tasks_v2beta3 -from google.cloud.tasks_v2beta3.proto import cloudtasks_pb2 -from google.cloud.tasks_v2beta3.proto import queue_pb2 -from google.cloud.tasks_v2beta3.proto import task_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestCloudTasksClient(object): - def test_list_queues(self): - # Setup Expected Response - next_page_token = "" - queues_element = {} - queues = [queues_element] - expected_response = {"next_page_token": next_page_token, "queues": queues} - expected_response = cloudtasks_pb2.ListQueuesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - parent = client.location_path("[PROJECT]", "[LOCATION]") - - paged_list_response = client.list_queues(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.queues[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.ListQueuesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_queues_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - parent = client.location_path("[PROJECT]", "[LOCATION]") - - paged_list_response = client.list_queues(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_queue(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - response = client.get_queue(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.GetQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert 
expected_request == actual_request - - def test_get_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.get_queue(name) - - def test_create_queue(self): - # Setup Expected Response - name = "name3373707" - expected_response = {"name": name} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - parent = client.location_path("[PROJECT]", "[LOCATION]") - queue = {} - - response = client.create_queue(parent, queue) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.CreateQueueRequest(parent=parent, queue=queue) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - parent = client.location_path("[PROJECT]", "[LOCATION]") - queue = {} - - with pytest.raises(CustomException): - client.create_queue(parent, queue) - - def test_update_queue(self): - # Setup Expected Response - name = "name3373707" - expected_response = {"name": name} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - queue = {} - - response = client.update_queue(queue) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.UpdateQueueRequest(queue=queue) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - queue = {} - - with pytest.raises(CustomException): - client.update_queue(queue) - - def test_delete_queue(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - client.delete_queue(name) - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.DeleteQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def 
test_delete_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.delete_queue(name) - - def test_purge_queue(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - response = client.purge_queue(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.PurgeQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_purge_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.purge_queue(name) - - def test_pause_queue(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - response = client.pause_queue(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.PauseQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_pause_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.pause_queue(name) - - def test_resume_queue(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = queue_pb2.Queue(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - response = 
client.resume_queue(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.ResumeQueueRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_resume_queue_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - name = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - with pytest.raises(CustomException): - client.resume_queue(name) - - def test_get_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - resource = "resource-341064690" - - response = client.get_iam_policy(resource) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - resource = "resource-341064690" - - with pytest.raises(CustomException): - client.get_iam_policy(resource) - - def test_set_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - resource = "resource-341064690" - policy = {} - - response = client.set_iam_policy(resource, policy) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_set_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - resource = "resource-341064690" - policy = {} - - with pytest.raises(CustomException): - client.set_iam_policy(resource, policy) - - def test_test_iam_permissions(self): - # Setup Expected Response - expected_response = {} - expected_response = iam_policy_pb2.TestIamPermissionsResponse( - **expected_response - ) - - # Mock the API response - channel = 
ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - resource = "resource-341064690" - permissions = [] - - response = client.test_iam_permissions(resource, permissions) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_test_iam_permissions_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - resource = "resource-341064690" - permissions = [] - - with pytest.raises(CustomException): - client.test_iam_permissions(resource, permissions) - - def test_list_tasks(self): - # Setup Expected Response - next_page_token = "" - tasks_element = {} - tasks = [tasks_element] - expected_response = {"next_page_token": next_page_token, "tasks": tasks} - expected_response = cloudtasks_pb2.ListTasksResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - paged_list_response = client.list_tasks(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.tasks[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.ListTasksRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_tasks_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - - paged_list_response = client.list_tasks(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_task(self): - # Setup Expected Response - name_2 = "name2-1052831874" - dispatch_count = 1217252086 - response_count = 424727441 - expected_response = { - "name": name_2, - "dispatch_count": dispatch_count, - "response_count": response_count, - } - expected_response = task_pb2.Task(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - response = client.get_task(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.GetTaskRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request 
- - def test_get_task_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - with pytest.raises(CustomException): - client.get_task(name) - - def test_create_task(self): - # Setup Expected Response - name = "name3373707" - dispatch_count = 1217252086 - response_count = 424727441 - expected_response = { - "name": name, - "dispatch_count": dispatch_count, - "response_count": response_count, - } - expected_response = task_pb2.Task(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - task = {} - - response = client.create_task(parent, task) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.CreateTaskRequest(parent=parent, task=task) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_task_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - parent = client.queue_path("[PROJECT]", "[LOCATION]", "[QUEUE]") - task = {} - - with pytest.raises(CustomException): - client.create_task(parent, task) - - def test_delete_task(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - client.delete_task(name) - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.DeleteTaskRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_task_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - with pytest.raises(CustomException): - client.delete_task(name) - - def test_run_task(self): - # Setup Expected Response - name_2 = "name2-1052831874" - dispatch_count = 1217252086 - response_count = 424727441 - expected_response = { - "name": name_2, - "dispatch_count": dispatch_count, - "response_count": response_count, - } - expected_response = task_pb2.Task(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup Request - name 
= client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - response = client.run_task(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloudtasks_pb2.RunTaskRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_run_task_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = tasks_v2beta3.CloudTasksClient() - - # Setup request - name = client.task_path("[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]") - - with pytest.raises(CustomException): - client.run_task(name)
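Note: the handwritten v2beta2/v2beta3 client tests removed above are superseded by the generated suites listed in the diffstat (tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py and tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py). As a rough illustration of the call shape those suites exercise against the microgen surface (a sketch, not code from this patch; the anonymous-credentials client is an assumption used only so no ADC lookup or live RPC is needed):

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import tasks_v2beta3

    # Path helpers now live on the client class rather than on client instances.
    name = tasks_v2beta3.CloudTasksClient.task_path(
        "[PROJECT]", "[LOCATION]", "[QUEUE]", "[TASK]"
    )

    # Requests are proto-plus messages constructed by keyword, replacing the
    # positional (name, schedule_time, ...) arguments the old tests relied on.
    request = tasks_v2beta3.RunTaskRequest(name=name)

    # Constructing the client does not open a connection, so this is safe offline.
    client = tasks_v2beta3.CloudTasksClient(credentials=AnonymousCredentials())

    # client.run_task(request=request) or client.run_task(name=name) would issue
    # the RPC; the generated client accepts either form.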