diff --git a/.coveragerc b/.coveragerc index 218f2ddf..d6dce8f1 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,27 +1,11 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! [run] branch = True [report] fail_under = 100 show_missing = True -omit = google/cloud/dataproc/__init__.py +omit = + google/cloud/dataproc/__init__.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER @@ -31,4 +15,4 @@ exclude_lines = # This is added at the module level as a safeguard for if someone # generates the code and tries to run it without pip installing. This # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound \ No newline at end of file + except pkg_resources.DistributionNotFound diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml new file mode 100644 index 00000000..fc281c05 --- /dev/null +++ b/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/.gitignore b/.gitignore index b9daa52f..b4243ced 100644 --- a/.gitignore +++ b/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 0cd63701..ef511aab 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-dataproc +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-dataproc" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version + +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. 
if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 11181078..8cf1dcbf 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-dataproc/.kokoro/build.sh" +} + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 00000000..f9cfcd33 --- /dev/null +++ b/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 00000000..f9cfcd33 --- /dev/null +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 00000000..f9cfcd33 --- /dev/null +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh new file mode 100755 index 00000000..68eddb57 --- /dev/null +++ b/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. 
+ +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-dataproc + +exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh new file mode 100755 index 00000000..cf5de74c --- /dev/null +++ b/.kokoro/test-samples-impl.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. \`./samples\` not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets accessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index a18decba..fbde31d4 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# The default test runner for samples. +# +# For periodic builds, we rewind the repo to the latest release and +# run test-samples-impl.sh. # `-e` enables the script to automatically fail when a command fails # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero @@ -24,87 +28,19 @@ cd github/python-dataproc # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + # Preserve the test runner implementation. + cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + echo "Now we rewind the repo back to the latest release..." LATEST_RELEASE=$(git describe --abbrev=0 --tags) git checkout $LATEST_RELEASE -fi - -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.6 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
- if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # Restore the test runner implementation if the checkout removed it. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" +exec .kokoro/test-samples-impl.sh diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a9024b15..32302e48 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,6 +12,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 + rev: 3.9.0 hooks: - id: flake8 diff --git a/.trampolinerc b/.trampolinerc index 995ee291..383b6ec8 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -24,6 +24,7 @@ required_envvars+=( pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index ecad28ae..be20ff0b 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -70,9 +70,14 @@ We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: $ nox -s unit-2.7 - $ nox -s unit-3.7 + $ nox -s unit-3.8 $ ... +- Args to pytest can be passed through the nox command separated by a `--`. For + example, to run a single test:: + + $ nox -s unit-3.8 -- -k <name of test> + .. note:: The unit tests and system tests are described in the @@ -93,8 +98,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken -- PEP8 compliance, with exceptions defined in the linter configuration. +- PEP8 compliance is required, with exceptions defined in the linter configuration. If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -133,13 +142,18 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 + # Run all system tests + $ nox -s system-3.8 $ nox -s system-2.7 + # Run a single system test + $ nox -s system-3.8 -- -k <name of system test> + + .. note:: System tests are only configured to run under Python 2.7 and - Python 3.7. For expediency, we do not run them in older versions + Python 3.8. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/MANIFEST.in b/MANIFEST.in index e9e29d12..e783f4c6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! 
include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/UPGRADING.md b/UPGRADING.md index dd7c2f3c..89513ddd 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -17,10 +17,10 @@ The 2.0.0 release requires Python 3.6+. Methods expect request objects. We provide a script that will convert most common use cases. -* Install the library +* Install the library with `libcst`. ```py -python3 -m pip install google-cloud-dataproc +python3 -m pip install google-cloud-dataproc[libcst] ``` * The script `fixup_dataproc_v1_keywords.py` is shipped with the library. It expects an input directory (with the code to convert) and an empty destination directory. @@ -165,4 +165,4 @@ location_path = f'projects/{project}/locations/{location}' region_path = f'projects/{project}/regions/{region}' workflow_template_path = f'projects/{project}/regions/{region}/workflowTemplates/{workflow_template}' autoscaling_policy_path = f'projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}' -``` \ No newline at end of file +``` diff --git a/docs/dataproc_v1/autoscaling_policy_service.rst b/docs/dataproc_v1/autoscaling_policy_service.rst new file mode 100644 index 00000000..cbb62baa --- /dev/null +++ b/docs/dataproc_v1/autoscaling_policy_service.rst @@ -0,0 +1,11 @@ +AutoscalingPolicyService +------------------------------------------ + +.. automodule:: google.cloud.dataproc_v1.services.autoscaling_policy_service + :members: + :inherited-members: + + +.. automodule:: google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers + :members: + :inherited-members: diff --git a/docs/dataproc_v1/cluster_controller.rst b/docs/dataproc_v1/cluster_controller.rst new file mode 100644 index 00000000..1c4e398b --- /dev/null +++ b/docs/dataproc_v1/cluster_controller.rst @@ -0,0 +1,11 @@ +ClusterController +----------------------------------- + +.. automodule:: google.cloud.dataproc_v1.services.cluster_controller + :members: + :inherited-members: + + +.. automodule:: google.cloud.dataproc_v1.services.cluster_controller.pagers + :members: + :inherited-members: diff --git a/docs/dataproc_v1/job_controller.rst b/docs/dataproc_v1/job_controller.rst new file mode 100644 index 00000000..e73db167 --- /dev/null +++ b/docs/dataproc_v1/job_controller.rst @@ -0,0 +1,11 @@ +JobController +------------------------------- + +.. automodule:: google.cloud.dataproc_v1.services.job_controller + :members: + :inherited-members: + + +.. automodule:: google.cloud.dataproc_v1.services.job_controller.pagers + :members: + :inherited-members: diff --git a/docs/dataproc_v1/services.rst b/docs/dataproc_v1/services.rst index 8c7fc841..9d91e7ce 100644 --- a/docs/dataproc_v1/services.rst +++ b/docs/dataproc_v1/services.rst @@ -1,15 +1,9 @@ Services for Google Cloud Dataproc v1 API ========================================= +.. toctree:: + :maxdepth: 2 -.. automodule:: google.cloud.dataproc_v1.services.autoscaling_policy_service - :members: - :inherited-members: -.. automodule:: google.cloud.dataproc_v1.services.cluster_controller - :members: - :inherited-members: -.. automodule:: google.cloud.dataproc_v1.services.job_controller - :members: - :inherited-members: -.. 
automodule:: google.cloud.dataproc_v1.services.workflow_template_service - :members: - :inherited-members: + autoscaling_policy_service + cluster_controller + job_controller + workflow_template_service diff --git a/docs/dataproc_v1/types.rst b/docs/dataproc_v1/types.rst index 5dde0cd6..bc1a0a30 100644 --- a/docs/dataproc_v1/types.rst +++ b/docs/dataproc_v1/types.rst @@ -3,4 +3,5 @@ Types for Google Cloud Dataproc v1 API .. automodule:: google.cloud.dataproc_v1.types :members: + :undoc-members: :show-inheritance: diff --git a/docs/dataproc_v1/workflow_template_service.rst b/docs/dataproc_v1/workflow_template_service.rst new file mode 100644 index 00000000..154de462 --- /dev/null +++ b/docs/dataproc_v1/workflow_template_service.rst @@ -0,0 +1,11 @@ +WorkflowTemplateService +----------------------------------------- + +.. automodule:: google.cloud.dataproc_v1.services.workflow_template_service + :members: + :inherited-members: + + +.. automodule:: google.cloud.dataproc_v1.services.workflow_template_service.pagers + :members: + :inherited-members: diff --git a/docs/dataproc_v1beta2/autoscaling_policy_service.rst b/docs/dataproc_v1beta2/autoscaling_policy_service.rst new file mode 100644 index 00000000..3a411371 --- /dev/null +++ b/docs/dataproc_v1beta2/autoscaling_policy_service.rst @@ -0,0 +1,11 @@ +AutoscalingPolicyService +------------------------------------------ + +.. automodule:: google.cloud.dataproc_v1beta2.services.autoscaling_policy_service + :members: + :inherited-members: + + +.. automodule:: google.cloud.dataproc_v1beta2.services.autoscaling_policy_service.pagers + :members: + :inherited-members: diff --git a/docs/dataproc_v1beta2/cluster_controller.rst b/docs/dataproc_v1beta2/cluster_controller.rst new file mode 100644 index 00000000..c10e78c7 --- /dev/null +++ b/docs/dataproc_v1beta2/cluster_controller.rst @@ -0,0 +1,11 @@ +ClusterController +----------------------------------- + +.. automodule:: google.cloud.dataproc_v1beta2.services.cluster_controller + :members: + :inherited-members: + + +.. automodule:: google.cloud.dataproc_v1beta2.services.cluster_controller.pagers + :members: + :inherited-members: diff --git a/docs/dataproc_v1beta2/job_controller.rst b/docs/dataproc_v1beta2/job_controller.rst new file mode 100644 index 00000000..3f5d74e1 --- /dev/null +++ b/docs/dataproc_v1beta2/job_controller.rst @@ -0,0 +1,11 @@ +JobController +------------------------------- + +.. automodule:: google.cloud.dataproc_v1beta2.services.job_controller + :members: + :inherited-members: + + +.. automodule:: google.cloud.dataproc_v1beta2.services.job_controller.pagers + :members: + :inherited-members: diff --git a/docs/dataproc_v1beta2/services.rst b/docs/dataproc_v1beta2/services.rst index 273b2def..23c2d640 100644 --- a/docs/dataproc_v1beta2/services.rst +++ b/docs/dataproc_v1beta2/services.rst @@ -1,15 +1,9 @@ Services for Google Cloud Dataproc v1beta2 API ============================================== +.. toctree:: + :maxdepth: 2 -.. automodule:: google.cloud.dataproc_v1beta2.services.autoscaling_policy_service - :members: - :inherited-members: -.. automodule:: google.cloud.dataproc_v1beta2.services.cluster_controller - :members: - :inherited-members: -.. automodule:: google.cloud.dataproc_v1beta2.services.job_controller - :members: - :inherited-members: -.. 
automodule:: google.cloud.dataproc_v1beta2.services.workflow_template_service - :members: - :inherited-members: + autoscaling_policy_service + cluster_controller + job_controller + workflow_template_service diff --git a/docs/dataproc_v1beta2/types.rst b/docs/dataproc_v1beta2/types.rst index e3dba489..1358e4c1 100644 --- a/docs/dataproc_v1beta2/types.rst +++ b/docs/dataproc_v1beta2/types.rst @@ -3,4 +3,5 @@ Types for Google Cloud Dataproc v1beta2 API .. automodule:: google.cloud.dataproc_v1beta2.types :members: + :undoc-members: :show-inheritance: diff --git a/docs/dataproc_v1beta2/workflow_template_service.rst b/docs/dataproc_v1beta2/workflow_template_service.rst new file mode 100644 index 00000000..10a2826c --- /dev/null +++ b/docs/dataproc_v1beta2/workflow_template_service.rst @@ -0,0 +1,11 @@ +WorkflowTemplateService +----------------------------------------- + +.. automodule:: google.cloud.dataproc_v1beta2.services.workflow_template_service + :members: + :inherited-members: + + +.. automodule:: google.cloud.dataproc_v1beta2.services.workflow_template_service.pagers + :members: + :inherited-members: diff --git a/google/cloud/dataproc_v1/__init__.py b/google/cloud/dataproc_v1/__init__.py index d248a17a..82d780ab 100644 --- a/google/cloud/dataproc_v1/__init__.py +++ b/google/cloud/dataproc_v1/__init__.py @@ -109,6 +109,7 @@ "AcceleratorConfig", "AutoscalingConfig", "AutoscalingPolicy", + "AutoscalingPolicyServiceClient", "BasicAutoscalingAlgorithm", "BasicYarnAutoscalingConfig", "CancelJobRequest", @@ -191,7 +192,6 @@ "WorkflowNode", "WorkflowTemplate", "WorkflowTemplatePlacement", - "WorkflowTemplateServiceClient", "YarnApplication", - "AutoscalingPolicyServiceClient", + "WorkflowTemplateServiceClient", ) diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py index fa91a7e7..52d43220 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py @@ -86,7 +86,36 @@ class AutoscalingPolicyServiceAsyncClient: AutoscalingPolicyServiceClient.parse_common_location_path ) - from_service_account_file = AutoscalingPolicyServiceClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalingPolicyServiceAsyncClient: The constructed client. + """ + return AutoscalingPolicyServiceClient.from_service_account_info.__func__(AutoscalingPolicyServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalingPolicyServiceAsyncClient: The constructed client. 
+ """ + return AutoscalingPolicyServiceClient.from_service_account_file.__func__(AutoscalingPolicyServiceAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -164,7 +193,7 @@ async def create_autoscaling_policy( r"""Creates new autoscaling policy. Args: - request (:class:`~.autoscaling_policies.CreateAutoscalingPolicyRequest`): + request (:class:`google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest`): The request object. A request to create an autoscaling policy. parent (:class:`str`): @@ -181,12 +210,14 @@ async def create_autoscaling_policy( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - policy (:class:`~.autoscaling_policies.AutoscalingPolicy`): + policy (:class:`google.cloud.dataproc_v1.types.AutoscalingPolicy`): Required. The autoscaling policy to create. + This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -198,7 +229,7 @@ async def create_autoscaling_policy( sent along with the request as metadata. Returns: - ~.autoscaling_policies.AutoscalingPolicy: + google.cloud.dataproc_v1.types.AutoscalingPolicy: Describes an autoscaling policy for Dataproc cluster autoscaler. @@ -258,12 +289,13 @@ async def update_autoscaling_policy( replacements. Args: - request (:class:`~.autoscaling_policies.UpdateAutoscalingPolicyRequest`): + request (:class:`google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest`): The request object. A request to update an autoscaling policy. - policy (:class:`~.autoscaling_policies.AutoscalingPolicy`): + policy (:class:`google.cloud.dataproc_v1.types.AutoscalingPolicy`): Required. The updated autoscaling policy. + This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -275,7 +307,7 @@ async def update_autoscaling_policy( sent along with the request as metadata. Returns: - ~.autoscaling_policies.AutoscalingPolicy: + google.cloud.dataproc_v1.types.AutoscalingPolicy: Describes an autoscaling policy for Dataproc cluster autoscaler. @@ -309,6 +341,7 @@ async def update_autoscaling_policy( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -340,7 +373,7 @@ async def get_autoscaling_policy( r"""Retrieves autoscaling policy. Args: - request (:class:`~.autoscaling_policies.GetAutoscalingPolicyRequest`): + request (:class:`google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest`): The request object. A request to fetch an autoscaling policy. name (:class:`str`): @@ -356,6 +389,7 @@ async def get_autoscaling_policy( the resource name of the policy has the following format: ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -367,7 +401,7 @@ async def get_autoscaling_policy( sent along with the request as metadata. Returns: - ~.autoscaling_policies.AutoscalingPolicy: + google.cloud.dataproc_v1.types.AutoscalingPolicy: Describes an autoscaling policy for Dataproc cluster autoscaler. 
@@ -401,6 +435,7 @@ async def get_autoscaling_policy( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -430,7 +465,7 @@ async def list_autoscaling_policies( r"""Lists autoscaling policies in the project. Args: - request (:class:`~.autoscaling_policies.ListAutoscalingPoliciesRequest`): + request (:class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest`): The request object. A request to list autoscaling policies in a project. parent (:class:`str`): @@ -446,6 +481,7 @@ async def list_autoscaling_policies( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -457,7 +493,7 @@ async def list_autoscaling_policies( sent along with the request as metadata. Returns: - ~.pagers.ListAutoscalingPoliciesAsyncPager: + google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesAsyncPager: A response to a request to list autoscaling policies in a project. Iterating over this object will yield @@ -494,6 +530,7 @@ async def list_autoscaling_policies( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -531,7 +568,7 @@ async def delete_autoscaling_policy( more clusters. Args: - request (:class:`~.autoscaling_policies.DeleteAutoscalingPolicyRequest`): + request (:class:`google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest`): The request object. A request to delete an autoscaling policy. Autoscaling policies in use by one or more clusters will @@ -551,6 +588,7 @@ async def delete_autoscaling_policy( the resource name of the policy has the following format: ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py index 1551d1f4..83bce7d3 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py @@ -114,6 +114,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalingPolicyServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -126,7 +142,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + AutoscalingPolicyServiceClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -236,10 +252,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.AutoscalingPolicyServiceTransport]): The + transport (Union[str, AutoscalingPolicyServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -275,21 +291,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -332,7 +344,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -350,10 +362,10 @@ def create_autoscaling_policy( r"""Creates new autoscaling policy. Args: - request (:class:`~.autoscaling_policies.CreateAutoscalingPolicyRequest`): + request (google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest): The request object. A request to create an autoscaling policy. - parent (:class:`str`): + parent (str): Required. The "resource name" of the region or location, as described in https://cloud.google.com/apis/design/resource_names. @@ -367,12 +379,14 @@ def create_autoscaling_policy( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - policy (:class:`~.autoscaling_policies.AutoscalingPolicy`): + policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): Required. The autoscaling policy to create. + This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -384,7 +398,7 @@ def create_autoscaling_policy( sent along with the request as metadata. Returns: - ~.autoscaling_policies.AutoscalingPolicy: + google.cloud.dataproc_v1.types.AutoscalingPolicy: Describes an autoscaling policy for Dataproc cluster autoscaler. @@ -447,12 +461,13 @@ def update_autoscaling_policy( replacements. 
Args: - request (:class:`~.autoscaling_policies.UpdateAutoscalingPolicyRequest`): + request (google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest): The request object. A request to update an autoscaling policy. - policy (:class:`~.autoscaling_policies.AutoscalingPolicy`): + policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): Required. The updated autoscaling policy. + This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -464,7 +479,7 @@ def update_autoscaling_policy( sent along with the request as metadata. Returns: - ~.autoscaling_policies.AutoscalingPolicy: + google.cloud.dataproc_v1.types.AutoscalingPolicy: Describes an autoscaling policy for Dataproc cluster autoscaler. @@ -524,10 +539,10 @@ def get_autoscaling_policy( r"""Retrieves autoscaling policy. Args: - request (:class:`~.autoscaling_policies.GetAutoscalingPolicyRequest`): + request (google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest): The request object. A request to fetch an autoscaling policy. - name (:class:`str`): + name (str): Required. The "resource name" of the autoscaling policy, as described in https://cloud.google.com/apis/design/resource_names. @@ -540,6 +555,7 @@ def get_autoscaling_policy( the resource name of the policy has the following format: ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -551,7 +567,7 @@ def get_autoscaling_policy( sent along with the request as metadata. Returns: - ~.autoscaling_policies.AutoscalingPolicy: + google.cloud.dataproc_v1.types.AutoscalingPolicy: Describes an autoscaling policy for Dataproc cluster autoscaler. @@ -607,10 +623,10 @@ def list_autoscaling_policies( r"""Lists autoscaling policies in the project. Args: - request (:class:`~.autoscaling_policies.ListAutoscalingPoliciesRequest`): + request (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest): The request object. A request to list autoscaling policies in a project. - parent (:class:`str`): + parent (str): Required. The "resource name" of the region or location, as described in https://cloud.google.com/apis/design/resource_names. @@ -623,6 +639,7 @@ def list_autoscaling_policies( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -634,7 +651,7 @@ def list_autoscaling_policies( sent along with the request as metadata. Returns: - ~.pagers.ListAutoscalingPoliciesPager: + google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesPager: A response to a request to list autoscaling policies in a project. Iterating over this object will yield @@ -703,12 +720,12 @@ def delete_autoscaling_policy( more clusters. Args: - request (:class:`~.autoscaling_policies.DeleteAutoscalingPolicyRequest`): + request (google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest): The request object. A request to delete an autoscaling policy. Autoscaling policies in use by one or more clusters will not be deleted. - name (:class:`str`): + name (str): Required. The "resource name" of the autoscaling policy, as described in https://cloud.google.com/apis/design/resource_names. 
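
The docstring churn above reflects the calling convention these generated clients use: every RPC accepts either a typed request object or the flattened fields shown, but never both. A short sketch of the two equivalent styles, assuming application default credentials and a placeholder resource name:

    from google.cloud import dataproc_v1

    client = dataproc_v1.AutoscalingPolicyServiceClient()
    name = "projects/my-project/locations/us-central1/autoscalingPolicies/my-policy"

    # Flattened keyword style.
    policy = client.get_autoscaling_policy(name=name)

    # Equivalent request-object style.
    request = dataproc_v1.GetAutoscalingPolicyRequest(name=name)
    policy = client.get_autoscaling_policy(request=request)
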
@@ -723,6 +740,7 @@ def delete_autoscaling_policy( the resource name of the policy has the following format: ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py index a246d08f..85deb317 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.dataproc_v1.types import autoscaling_policies @@ -24,7 +33,7 @@ class ListAutoscalingPoliciesPager: """A pager for iterating through ``list_autoscaling_policies`` requests. This class thinly wraps an initial - :class:`~.autoscaling_policies.ListAutoscalingPoliciesResponse` object, and + :class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse` object, and provides an ``__iter__`` method to iterate through its ``policies`` field. @@ -33,7 +42,7 @@ class ListAutoscalingPoliciesPager: through the ``policies`` field on the corresponding responses. - All the usual :class:`~.autoscaling_policies.ListAutoscalingPoliciesResponse` + All the usual :class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.autoscaling_policies.ListAutoscalingPoliciesRequest`): + request (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest): The initial request object. - response (:class:`~.autoscaling_policies.ListAutoscalingPoliciesResponse`): + response (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListAutoscalingPoliciesAsyncPager: """A pager for iterating through ``list_autoscaling_policies`` requests. This class thinly wraps an initial - :class:`~.autoscaling_policies.ListAutoscalingPoliciesResponse` object, and + :class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse` object, and provides an ``__aiter__`` method to iterate through its ``policies`` field. @@ -95,7 +104,7 @@ class ListAutoscalingPoliciesAsyncPager: through the ``policies`` field on the corresponding responses. - All the usual :class:`~.autoscaling_policies.ListAutoscalingPoliciesResponse` + All the usual :class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -115,9 +124,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. 
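
The pagers documented above hide the page-token plumbing: iterating a ListAutoscalingPoliciesPager lazily issues follow-up list requests and yields individual policies. A minimal sketch, assuming application default credentials and a placeholder parent:

    from google.cloud import dataproc_v1

    client = dataproc_v1.AutoscalingPolicyServiceClient()
    pager = client.list_autoscaling_policies(
        parent="projects/my-project/locations/us-central1"
    )

    # Yields AutoscalingPolicy messages across all pages.
    for policy in pager:
        print(policy.id)

    # Page-at-a-time iteration is also available via pager.pages.
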
- request (:class:`~.autoscaling_policies.ListAutoscalingPoliciesRequest`): + request (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest): The initial request object. - response (:class:`~.autoscaling_policies.ListAutoscalingPoliciesResponse`): + response (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py index 46201f4b..6fcb1442 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py @@ -68,10 +68,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -79,6 +79,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -88,20 +91,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -119,6 +119,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -132,6 +133,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -145,6 +147,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py index 736d0870..e1df740b 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py @@ -59,6 +59,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -89,6 +90,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -103,72 +108,60 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
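
The deadline=600.0 values threaded into the default retries above cap the total time spent across retry attempts, complementing the per-attempt timeout. Both can still be overridden per call; a sketch with illustrative numbers and a placeholder resource name:

    from google.api_core import retry as retries
    from google.cloud import dataproc_v1

    client = dataproc_v1.AutoscalingPolicyServiceClient()
    policy = client.get_autoscaling_policy(
        name="projects/my-project/locations/us-central1/autoscalingPolicies/my-policy",
        # Give up retrying transient errors after 60 seconds overall.
        retry=retries.Retry(initial=0.1, maximum=10.0, multiplier=1.3, deadline=60.0),
        timeout=30.0,  # per-attempt timeout
    )
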
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -176,17 +169,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -200,7 +184,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. 
If diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py index dc52d94d..f098fe12 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py @@ -63,7 +63,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -103,6 +103,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -134,12 +135,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -148,72 +153,60 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -221,17 +214,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/dataproc_v1/services/cluster_controller/async_client.py b/google/cloud/dataproc_v1/services/cluster_controller/async_client.py index 75a59d2d..924c35f0 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/async_client.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/async_client.py @@ -80,7 +80,36 @@ class ClusterControllerAsyncClient: ClusterControllerClient.parse_common_location_path ) - from_service_account_file = ClusterControllerClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterControllerAsyncClient: The constructed client. + """ + return ClusterControllerClient.from_service_account_info.__func__(ClusterControllerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterControllerAsyncClient: The constructed client. + """ + return ClusterControllerClient.from_service_account_file.__func__(ClusterControllerAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -161,22 +190,24 @@ async def create_cluster( `ClusterOperationMetadata `__. Args: - request (:class:`~.clusters.CreateClusterRequest`): + request (:class:`google.cloud.dataproc_v1.types.CreateClusterRequest`): The request object. A request to create a cluster. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster (:class:`~.clusters.Cluster`): + cluster (:class:`google.cloud.dataproc_v1.types.Cluster`): Required. The cluster to create. This corresponds to the ``cluster`` field on the ``request`` instance; if ``request`` is provided, this @@ -189,13 +220,11 @@ async def create_cluster( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.clusters.Cluster``: Describes the identifying - information, config, and status of a cluster of Compute - Engine instances. + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a cluster of Compute Engine instances. """ # Create or coerce a protobuf request object. @@ -229,6 +258,7 @@ async def create_cluster( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=DEFAULT_CLIENT_INFO, @@ -267,17 +297,19 @@ async def update_cluster( `ClusterOperationMetadata `__. Args: - request (:class:`~.clusters.UpdateClusterRequest`): + request (:class:`google.cloud.dataproc_v1.types.UpdateClusterRequest`): The request object. A request to update a cluster. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
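The async client now defines `from_service_account_info` and `from_service_account_file` directly (delegating to the sync classmethod via `__func__`) instead of aliasing the sync attribute, so the constructed instance is the async type. A usage sketch; the key-file path is hypothetical:

    import json
    from google.cloud import dataproc_v1

    # From a service-account key file on disk:
    client = dataproc_v1.ClusterControllerAsyncClient.from_service_account_file(
        "service-account.json"
    )

    # Or from an already-parsed dict (e.g., fetched from a secret store):
    with open("service-account.json") as f:
        info = json.load(f)
    client = dataproc_v1.ClusterControllerAsyncClient.from_service_account_info(info)
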
@@ -286,12 +318,12 @@ async def update_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster (:class:`~.clusters.Cluster`): + cluster (:class:`google.cloud.dataproc_v1.types.Cluster`): Required. The changes to the cluster. This corresponds to the ``cluster`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Specifies the path, relative to ``Cluster``, of the field to update. For example, to change the number of workers in a cluster to 5, the ``update_mask`` @@ -354,6 +386,7 @@ async def update_cluster( + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -365,13 +398,11 @@ async def update_cluster( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.clusters.Cluster``: Describes the identifying - information, config, and status of a cluster of Compute - Engine instances. + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a cluster of Compute Engine instances. """ # Create or coerce a protobuf request object. @@ -411,6 +442,7 @@ async def update_cluster( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=DEFAULT_CLIENT_INFO, @@ -447,18 +479,20 @@ async def delete_cluster( `ClusterOperationMetadata `__. Args: - request (:class:`~.clusters.DeleteClusterRequest`): + request (:class:`google.cloud.dataproc_v1.types.DeleteClusterRequest`): The request object. A request to delete a cluster. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -475,24 +509,22 @@ async def delete_cluster( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. 
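The `update_mask` docstring gives the worked example of resizing a cluster to five workers by updating `config.worker_config.num_instances`; spelled out against the async client (project, region, and cluster names below are hypothetical):

    from google.cloud import dataproc_v1
    from google.protobuf import field_mask_pb2

    async def resize(client: dataproc_v1.ClusterControllerAsyncClient):
        operation = await client.update_cluster(
            project_id="my-project",
            region="us-central1",
            cluster_name="my-cluster",
            # The cluster message carries only the fields being changed.
            cluster=dataproc_v1.Cluster(
                config=dataproc_v1.ClusterConfig(
                    worker_config=dataproc_v1.InstanceGroupConfig(num_instances=5)
                )
            ),
            update_mask=field_mask_pb2.FieldMask(
                paths=["config.worker_config.num_instances"]
            ),
        )
        return await operation.result()  # resolves to the updated Cluster
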
+ The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -526,6 +558,7 @@ async def delete_cluster( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=DEFAULT_CLIENT_INFO, @@ -560,19 +593,21 @@ async def get_cluster( project. Args: - request (:class:`~.clusters.GetClusterRequest`): + request (:class:`google.cloud.dataproc_v1.types.GetClusterRequest`): The request object. Request to get the resource representation for a cluster in a project. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -589,7 +624,7 @@ async def get_cluster( sent along with the request as metadata. Returns: - ~.clusters.Cluster: + google.cloud.dataproc_v1.types.Cluster: Describes the identifying information, config, and status of a cluster of Compute Engine instances. @@ -630,6 +665,7 @@ async def get_cluster( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=300.0, ), default_timeout=300.0, client_info=DEFAULT_CLIENT_INFO, @@ -656,19 +692,21 @@ async def list_clusters( alphabetically. Args: - request (:class:`~.clusters.ListClustersRequest`): + request (:class:`google.cloud.dataproc_v1.types.ListClustersRequest`): The request object. A request to list the clusters in a project. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -696,6 +734,7 @@ async def list_clusters( status.state = ACTIVE AND clusterName = mycluster AND labels.env = staging AND labels.starred = \* + This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -707,7 +746,7 @@ async def list_clusters( sent along with the request as metadata. Returns: - ~.pagers.ListClustersAsyncPager: + google.cloud.dataproc_v1.services.cluster_controller.pagers.ListClustersAsyncPager: The list of all clusters in a project. Iterating over this object will yield @@ -750,6 +789,7 @@ async def list_clusters( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=300.0, ), default_timeout=300.0, client_info=DEFAULT_CLIENT_INFO, @@ -788,19 +828,21 @@ async def diagnose_cluster( `DiagnoseClusterResults `__. Args: - request (:class:`~.clusters.DiagnoseClusterRequest`): + request (:class:`google.cloud.dataproc_v1.types.DiagnoseClusterRequest`): The request object. A request to collect cluster diagnostic information. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. 
The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -817,12 +859,12 @@ async def diagnose_cluster( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.clusters.DiagnoseClusterResults``: The - location of diagnostic output. + :class:`google.cloud.dataproc_v1.types.DiagnoseClusterResults` + The location of diagnostic output. """ # Create or coerce a protobuf request object. @@ -856,6 +898,7 @@ async def diagnose_cluster( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/dataproc_v1/services/cluster_controller/client.py b/google/cloud/dataproc_v1/services/cluster_controller/client.py index 42594c47..daef141a 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/client.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/client.py @@ -119,6 +119,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -131,7 +147,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + ClusterControllerClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -223,10 +239,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.ClusterControllerTransport]): The + transport (Union[str, ClusterControllerTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
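For example, since Dataproc is a regionalized service, callers commonly use the `api_endpoint` override to target a regional endpoint. A sketch, assuming the usual `{region}-dataproc.googleapis.com` endpoint pattern:

    from google.api_core.client_options import ClientOptions
    from google.cloud import dataproc_v1

    options = ClientOptions(api_endpoint="us-central1-dataproc.googleapis.com:443")
    client = dataproc_v1.ClusterControllerClient(client_options=options)
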
GOOGLE_API_USE_MTLS_ENDPOINT @@ -262,21 +278,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -319,7 +331,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -341,22 +353,24 @@ def create_cluster( `ClusterOperationMetadata `__. Args: - request (:class:`~.clusters.CreateClusterRequest`): + request (google.cloud.dataproc_v1.types.CreateClusterRequest): The request object. A request to create a cluster. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster (:class:`~.clusters.Cluster`): + cluster (google.cloud.dataproc_v1.types.Cluster): Required. The cluster to create. This corresponds to the ``cluster`` field on the ``request`` instance; if ``request`` is provided, this @@ -369,13 +383,11 @@ def create_cluster( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.clusters.Cluster``: Describes the identifying - information, config, and status of a cluster of Compute - Engine instances. + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a cluster of Compute Engine instances. """ # Create or coerce a protobuf request object. @@ -442,31 +454,33 @@ def update_cluster( `ClusterOperationMetadata `__. Args: - request (:class:`~.clusters.UpdateClusterRequest`): + request (google.cloud.dataproc_v1.types.UpdateClusterRequest): The request object. A request to update a cluster. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
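With this change the client forwards a certificate *callback* (`client_cert_source_for_mtls`) to the transport instead of pre-built SSL credentials, deferring certificate loading until the channel is actually created. A hedged sketch of opting in from user code; the file paths are hypothetical:

    import os
    from google.api_core.client_options import ClientOptions
    from google.cloud import dataproc_v1

    # The client only honors client certificates when this env var is "true",
    # and it is read at construction time.
    os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"

    def my_cert_source():
        with open("client.pem", "rb") as cert, open("client.key", "rb") as key:
            return cert.read(), key.read()

    client = dataproc_v1.ClusterControllerClient(
        client_options=ClientOptions(client_cert_source=my_cert_source)
    )
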
- cluster_name (:class:`str`): + cluster_name (str): Required. The cluster name. This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster (:class:`~.clusters.Cluster`): + cluster (google.cloud.dataproc_v1.types.Cluster): Required. The changes to the cluster. This corresponds to the ``cluster`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Specifies the path, relative to ``Cluster``, of the field to update. For example, to change the number of workers in a cluster to 5, the ``update_mask`` @@ -529,6 +543,7 @@ def update_cluster( + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -540,13 +555,11 @@ def update_cluster( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.clusters.Cluster``: Describes the identifying - information, config, and status of a cluster of Compute - Engine instances. + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a cluster of Compute Engine instances. """ # Create or coerce a protobuf request object. @@ -617,22 +630,24 @@ def delete_cluster( `ClusterOperationMetadata `__. Args: - request (:class:`~.clusters.DeleteClusterRequest`): + request (google.cloud.dataproc_v1.types.DeleteClusterRequest): The request object. A request to delete a cluster. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster_name (:class:`str`): + cluster_name (str): Required. The cluster name. This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -645,24 +660,22 @@ def delete_cluster( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. 
""" # Create or coerce a protobuf request object. @@ -725,23 +738,25 @@ def get_cluster( project. Args: - request (:class:`~.clusters.GetClusterRequest`): + request (google.cloud.dataproc_v1.types.GetClusterRequest): The request object. Request to get the resource representation for a cluster in a project. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster_name (:class:`str`): + cluster_name (str): Required. The cluster name. This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -754,7 +769,7 @@ def get_cluster( sent along with the request as metadata. Returns: - ~.clusters.Cluster: + google.cloud.dataproc_v1.types.Cluster: Describes the identifying information, config, and status of a cluster of Compute Engine instances. @@ -812,23 +827,25 @@ def list_clusters( alphabetically. Args: - request (:class:`~.clusters.ListClustersRequest`): + request (google.cloud.dataproc_v1.types.ListClustersRequest): The request object. A request to list the clusters in a project. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - filter (:class:`str`): + filter (str): Optional. A filter constraining the clusters to list. Filters are case-sensitive and have the following syntax: @@ -852,6 +869,7 @@ def list_clusters( status.state = ACTIVE AND clusterName = mycluster AND labels.env = staging AND labels.starred = \* + This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -863,7 +881,7 @@ def list_clusters( sent along with the request as metadata. Returns: - ~.pagers.ListClustersPager: + google.cloud.dataproc_v1.services.cluster_controller.pagers.ListClustersPager: The list of all clusters in a project. Iterating over this object will yield @@ -935,23 +953,25 @@ def diagnose_cluster( `DiagnoseClusterResults `__. Args: - request (:class:`~.clusters.DiagnoseClusterRequest`): + request (google.cloud.dataproc_v1.types.DiagnoseClusterRequest): The request object. A request to collect cluster diagnostic information. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster_name (:class:`str`): + cluster_name (str): Required. The cluster name. 
This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -964,12 +984,12 @@ def diagnose_cluster( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.clusters.DiagnoseClusterResults``: The - location of diagnostic output. + :class:`google.cloud.dataproc_v1.types.DiagnoseClusterResults` + The location of diagnostic output. """ # Create or coerce a protobuf request object. diff --git a/google/cloud/dataproc_v1/services/cluster_controller/pagers.py b/google/cloud/dataproc_v1/services/cluster_controller/pagers.py index c5f0fbdf..418c92b1 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/pagers.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.dataproc_v1.types import clusters @@ -24,7 +33,7 @@ class ListClustersPager: """A pager for iterating through ``list_clusters`` requests. This class thinly wraps an initial - :class:`~.clusters.ListClustersResponse` object, and + :class:`google.cloud.dataproc_v1.types.ListClustersResponse` object, and provides an ``__iter__`` method to iterate through its ``clusters`` field. @@ -33,7 +42,7 @@ class ListClustersPager: through the ``clusters`` field on the corresponding responses. - All the usual :class:`~.clusters.ListClustersResponse` + All the usual :class:`google.cloud.dataproc_v1.types.ListClustersResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.clusters.ListClustersRequest`): + request (google.cloud.dataproc_v1.types.ListClustersRequest): The initial request object. - response (:class:`~.clusters.ListClustersResponse`): + response (google.cloud.dataproc_v1.types.ListClustersResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListClustersAsyncPager: """A pager for iterating through ``list_clusters`` requests. This class thinly wraps an initial - :class:`~.clusters.ListClustersResponse` object, and + :class:`google.cloud.dataproc_v1.types.ListClustersResponse` object, and provides an ``__aiter__`` method to iterate through its ``clusters`` field. @@ -95,7 +104,7 @@ class ListClustersAsyncPager: through the ``clusters`` field on the corresponding responses. - All the usual :class:`~.clusters.ListClustersResponse` + All the usual :class:`google.cloud.dataproc_v1.types.ListClustersResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -113,9 +122,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.clusters.ListClustersRequest`): + request (google.cloud.dataproc_v1.types.ListClustersRequest): The initial request object. 
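The pagers thinly wrap `ListClustersResponse` and fetch additional pages lazily during iteration, so callers never handle page tokens directly. Combined with the filter syntax documented above (project, region, and label values are hypothetical):

    from google.cloud import dataproc_v1

    client = dataproc_v1.ClusterControllerClient()
    for cluster in client.list_clusters(
        project_id="my-project",
        region="us-central1",
        filter="status.state = ACTIVE AND labels.env = staging",
    ):
        print(cluster.cluster_name)
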
- response (:class:`~.clusters.ListClustersResponse`): + response (google.cloud.dataproc_v1.types.ListClustersResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py index caccd04e..3583488a 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py @@ -69,10 +69,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -80,6 +80,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -89,20 +92,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -113,6 +113,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=client_info, @@ -124,6 +125,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=client_info, @@ -135,6 +137,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=client_info, @@ -150,6 +153,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=300.0, ), default_timeout=300.0, client_info=client_info, @@ -165,6 +169,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=300.0, ), default_timeout=300.0, client_info=client_info, @@ -176,6 +181,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=client_info, diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py index ae1b8d78..f8c3e60a 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py @@ -60,6 +60,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -90,6 +91,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -104,72 +109,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -177,18 +171,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -202,7 +186,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. 
These credentials identify this application to the service. If diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py index b3b50cf4..e27a348b 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py @@ -64,7 +64,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -104,6 +104,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -135,12 +136,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -149,72 +154,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -222,18 +216,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/dataproc_v1/services/job_controller/async_client.py b/google/cloud/dataproc_v1/services/job_controller/async_client.py index 8eaf753e..cc5d6522 100644 --- a/google/cloud/dataproc_v1/services/job_controller/async_client.py +++ b/google/cloud/dataproc_v1/services/job_controller/async_client.py @@ -75,7 +75,36 @@ class JobControllerAsyncClient: JobControllerClient.parse_common_location_path ) - from_service_account_file = JobControllerClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + JobControllerAsyncClient: The constructed client. + """ + return JobControllerClient.from_service_account_info.__func__(JobControllerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobControllerAsyncClient: The constructed client. + """ + return JobControllerClient.from_service_account_file.__func__(JobControllerAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -153,22 +182,24 @@ async def submit_job( r"""Submits a job to a cluster. Args: - request (:class:`~.jobs.SubmitJobRequest`): + request (:class:`google.cloud.dataproc_v1.types.SubmitJobRequest`): The request object. A request to submit a job. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job (:class:`~.jobs.Job`): + job (:class:`google.cloud.dataproc_v1.types.Job`): Required. The job resource. This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this @@ -181,7 +212,7 @@ async def submit_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -215,6 +246,7 @@ async def submit_job( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -240,22 +272,24 @@ async def submit_job_as_operation( r"""Submits job to a cluster. Args: - request (:class:`~.jobs.SubmitJobRequest`): + request (:class:`google.cloud.dataproc_v1.types.SubmitJobRequest`): The request object. A request to submit a job. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job (:class:`~.jobs.Job`): + job (:class:`google.cloud.dataproc_v1.types.Job`): Required. The job resource. This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this @@ -268,11 +302,12 @@ async def submit_job_as_operation( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.jobs.Job``: A Dataproc job resource. + :class:`google.cloud.dataproc_v1.types.Job` A Dataproc + job resource. 
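Putting `submit_job_as_operation` together: the awaited call returns an `AsyncOperation`, and awaiting its `result()` yields the finished `Job`. A sketch with hypothetical project, bucket, and cluster names:

    import asyncio
    from google.cloud import dataproc_v1

    async def main():
        client = dataproc_v1.JobControllerAsyncClient()
        operation = await client.submit_job_as_operation(
            project_id="my-project",
            region="us-central1",
            job=dataproc_v1.Job(
                placement=dataproc_v1.JobPlacement(cluster_name="my-cluster"),
                pyspark_job=dataproc_v1.PySparkJob(
                    main_python_file_uri="gs://my-bucket/word_count.py"
                ),
            ),
        )
        job = await operation.result()
        print(job.status.state)

    asyncio.run(main())
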
""" # Create or coerce a protobuf request object. @@ -306,6 +341,7 @@ async def submit_job_as_operation( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -340,19 +376,21 @@ async def get_job( project. Args: - request (:class:`~.jobs.GetJobRequest`): + request (:class:`google.cloud.dataproc_v1.types.GetJobRequest`): The request object. A request to get the resource representation for a job in a project. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -369,7 +407,7 @@ async def get_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -407,6 +445,7 @@ async def get_job( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -432,18 +471,20 @@ async def list_jobs( r"""Lists regions/{region}/jobs in a project. Args: - request (:class:`~.jobs.ListJobsRequest`): + request (:class:`google.cloud.dataproc_v1.types.ListJobsRequest`): The request object. A request to list jobs in a project. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -465,6 +506,7 @@ async def list_jobs( status.state = ACTIVE AND labels.env = staging AND labels.starred = \* + This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -476,7 +518,7 @@ async def list_jobs( sent along with the request as metadata. Returns: - ~.pagers.ListJobsAsyncPager: + google.cloud.dataproc_v1.services.job_controller.pagers.ListJobsAsyncPager: A list of jobs in a project. Iterating over this object will yield results and resolve additional pages @@ -518,6 +560,7 @@ async def list_jobs( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -546,7 +589,7 @@ async def update_job( r"""Updates a job in a project. Args: - request (:class:`~.jobs.UpdateJobRequest`): + request (:class:`google.cloud.dataproc_v1.types.UpdateJobRequest`): The request object. A request to update a job. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -556,7 +599,7 @@ async def update_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. 
@@ -572,6 +615,7 @@ async def update_job( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -601,18 +645,20 @@ async def cancel_job( `regions/{region}/jobs.get `__. Args: - request (:class:`~.jobs.CancelJobRequest`): + request (:class:`google.cloud.dataproc_v1.types.CancelJobRequest`): The request object. A request to cancel a job. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -629,7 +675,7 @@ async def cancel_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -667,6 +713,7 @@ async def cancel_job( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -693,18 +740,20 @@ async def delete_job( delete fails, and the response returns ``FAILED_PRECONDITION``. Args: - request (:class:`~.jobs.DeleteJobRequest`): + request (:class:`google.cloud.dataproc_v1.types.DeleteJobRequest`): The request object. A request to delete a job. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -751,6 +800,7 @@ async def delete_job( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/dataproc_v1/services/job_controller/client.py b/google/cloud/dataproc_v1/services/job_controller/client.py index d101e833..92d3a67e 100644 --- a/google/cloud/dataproc_v1/services/job_controller/client.py +++ b/google/cloud/dataproc_v1/services/job_controller/client.py @@ -110,6 +110,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -122,7 +138,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. 
+ JobControllerClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -214,10 +230,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.JobControllerTransport]): The + transport (Union[str, JobControllerTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -253,21 +269,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -310,7 +322,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -329,22 +341,24 @@ def submit_job( r"""Submits a job to a cluster. Args: - request (:class:`~.jobs.SubmitJobRequest`): + request (google.cloud.dataproc_v1.types.SubmitJobRequest): The request object. A request to submit a job. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job (:class:`~.jobs.Job`): + job (google.cloud.dataproc_v1.types.Job): Required. The job resource. This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this @@ -357,7 +371,7 @@ def submit_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -411,22 +425,24 @@ def submit_job_as_operation( r"""Submits job to a cluster. Args: - request (:class:`~.jobs.SubmitJobRequest`): + request (google.cloud.dataproc_v1.types.SubmitJobRequest): The request object. A request to submit a job. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. 
+ This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job (:class:`~.jobs.Job`): + job (google.cloud.dataproc_v1.types.Job): Required. The job resource. This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this @@ -439,11 +455,12 @@ def submit_job_as_operation( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.jobs.Job``: A Dataproc job resource. + :class:`google.cloud.dataproc_v1.types.Job` A Dataproc + job resource. """ # Create or coerce a protobuf request object. @@ -506,23 +523,25 @@ def get_job( project. Args: - request (:class:`~.jobs.GetJobRequest`): + request (google.cloud.dataproc_v1.types.GetJobRequest): The request object. A request to get the resource representation for a job in a project. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job_id (:class:`str`): + job_id (str): Required. The job ID. This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -535,7 +554,7 @@ def get_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -589,22 +608,24 @@ def list_jobs( r"""Lists regions/{region}/jobs in a project. Args: - request (:class:`~.jobs.ListJobsRequest`): + request (google.cloud.dataproc_v1.types.ListJobsRequest): The request object. A request to list jobs in a project. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - filter (:class:`str`): + filter (str): Optional. A filter constraining the jobs to list. Filters are case-sensitive and have the following syntax: @@ -622,6 +643,7 @@ def list_jobs( status.state = ACTIVE AND labels.env = staging AND labels.starred = \* + This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -633,7 +655,7 @@ def list_jobs( sent along with the request as metadata. Returns: - ~.pagers.ListJobsPager: + google.cloud.dataproc_v1.services.job_controller.pagers.ListJobsPager: A list of jobs in a project. Iterating over this object will yield results and resolve additional pages @@ -694,7 +716,7 @@ def update_job( r"""Updates a job in a project. 
Args: - request (:class:`~.jobs.UpdateJobRequest`): + request (google.cloud.dataproc_v1.types.UpdateJobRequest): The request object. A request to update a job. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -704,7 +726,7 @@ def update_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -744,22 +766,24 @@ def cancel_job( `regions/{region}/jobs.get `__. Args: - request (:class:`~.jobs.CancelJobRequest`): + request (google.cloud.dataproc_v1.types.CancelJobRequest): The request object. A request to cancel a job. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job_id (:class:`str`): + job_id (str): Required. The job ID. This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -772,7 +796,7 @@ def cancel_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -827,22 +851,24 @@ def delete_job( delete fails, and the response returns ``FAILED_PRECONDITION``. Args: - request (:class:`~.jobs.DeleteJobRequest`): + request (google.cloud.dataproc_v1.types.DeleteJobRequest): The request object. A request to delete a job. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job_id (:class:`str`): + job_id (str): Required. The job ID. This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/google/cloud/dataproc_v1/services/job_controller/pagers.py b/google/cloud/dataproc_v1/services/job_controller/pagers.py index 185f0ace..77ae8be8 100644 --- a/google/cloud/dataproc_v1/services/job_controller/pagers.py +++ b/google/cloud/dataproc_v1/services/job_controller/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.dataproc_v1.types import jobs @@ -24,7 +33,7 @@ class ListJobsPager: """A pager for iterating through ``list_jobs`` requests. This class thinly wraps an initial - :class:`~.jobs.ListJobsResponse` object, and + :class:`google.cloud.dataproc_v1.types.ListJobsResponse` object, and provides an ``__iter__`` method to iterate through its ``jobs`` field. @@ -33,7 +42,7 @@ class ListJobsPager: through the ``jobs`` field on the corresponding responses. 
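# The ListJobsPager documented above hides page tokens: plain iteration
# yields Job messages across all pages (the .pages property walks whole
# ListJobsResponse objects instead). A sketch with hypothetical
# project/region values:
from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient()
for job in client.list_jobs(project_id="my-project", region="us-central1"):
    print(job.reference.job_id)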
- All the usual :class:`~.jobs.ListJobsResponse` + All the usual :class:`google.cloud.dataproc_v1.types.ListJobsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.jobs.ListJobsRequest`): + request (google.cloud.dataproc_v1.types.ListJobsRequest): The initial request object. - response (:class:`~.jobs.ListJobsResponse`): + response (google.cloud.dataproc_v1.types.ListJobsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListJobsAsyncPager: """A pager for iterating through ``list_jobs`` requests. This class thinly wraps an initial - :class:`~.jobs.ListJobsResponse` object, and + :class:`google.cloud.dataproc_v1.types.ListJobsResponse` object, and provides an ``__aiter__`` method to iterate through its ``jobs`` field. @@ -95,7 +104,7 @@ class ListJobsAsyncPager: through the ``jobs`` field on the corresponding responses. - All the usual :class:`~.jobs.ListJobsResponse` + All the usual :class:`google.cloud.dataproc_v1.types.ListJobsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -113,9 +122,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.jobs.ListJobsRequest`): + request (google.cloud.dataproc_v1.types.ListJobsRequest): The initial request object. - response (:class:`~.jobs.ListJobsResponse`): + response (google.cloud.dataproc_v1.types.ListJobsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/base.py b/google/cloud/dataproc_v1/services/job_controller/transports/base.py index c8538dd1..15bf4766 100644 --- a/google/cloud/dataproc_v1/services/job_controller/transports/base.py +++ b/google/cloud/dataproc_v1/services/job_controller/transports/base.py @@ -70,10 +70,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -81,6 +81,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. 
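# The "Save the scopes" change above resolves scopes once and feeds the
# same value to both credential paths below it. The precedence is plain
# Python "or": an explicit sequence wins, otherwise AUTH_SCOPES applies.
# A standalone sketch of that rule:
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

def resolve_scopes(scopes=None):
    # Mirrors: self._scopes = scopes or self.AUTH_SCOPES
    return scopes or AUTH_SCOPES

assert resolve_scopes() == AUTH_SCOPES
assert resolve_scopes(["https://www.googleapis.com/auth/custom"]) == [
    "https://www.googleapis.com/auth/custom"
]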
if credentials and credentials_file: @@ -90,20 +93,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -114,6 +114,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -125,6 +126,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -140,6 +142,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -155,6 +158,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -166,6 +170,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -181,6 +186,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -192,6 +198,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py b/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py index 5802abf7..9842af0e 100644 --- a/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py @@ -60,6 +60,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -90,6 +91,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -104,72 +109,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. 
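# Sketch of the client_cert_source_for_mtls contract added above: a
# zero-argument callable returning (certificate_chain, private_key)
# bytes in PEM format, which the transport converts into channel
# credentials exactly as shown here. The file paths are hypothetical.
import grpc

def client_cert_source_for_mtls():
    with open("client_cert.pem", "rb") as cert_file:
        cert = cert_file.read()
    with open("client_key.pem", "rb") as key_file:
        key = key_file.read()
    return cert, key

cert, key = client_cert_source_for_mtls()
ssl_channel_credentials = grpc.ssl_channel_credentials(
    certificate_chain=cert, private_key=key
)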
+ if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -177,18 +171,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -202,7 +186,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py index 04011df0..e9b2d197 100644 --- a/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py @@ -64,7 +64,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -104,6 +104,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -135,12 +136,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. 
Raises: @@ -149,72 +154,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -222,18 +216,8 @@ def __init__( ], ) - # Run the base constructor. 
- super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py b/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py index 44cb69b3..cbfafedc 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py @@ -90,7 +90,36 @@ class WorkflowTemplateServiceAsyncClient: WorkflowTemplateServiceClient.parse_common_location_path ) - from_service_account_file = WorkflowTemplateServiceClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkflowTemplateServiceAsyncClient: The constructed client. + """ + return WorkflowTemplateServiceClient.from_service_account_info.__func__(WorkflowTemplateServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkflowTemplateServiceAsyncClient: The constructed client. + """ + return WorkflowTemplateServiceClient.from_service_account_file.__func__(WorkflowTemplateServiceAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -168,7 +197,7 @@ async def create_workflow_template( r"""Creates new workflow template. Args: - request (:class:`~.workflow_templates.CreateWorkflowTemplateRequest`): + request (:class:`google.cloud.dataproc_v1.types.CreateWorkflowTemplateRequest`): The request object. A request to create a workflow template. parent (:class:`str`): @@ -184,12 +213,14 @@ async def create_workflow_template( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - template (:class:`~.workflow_templates.WorkflowTemplate`): + template (:class:`google.cloud.dataproc_v1.types.WorkflowTemplate`): Required. The Dataproc workflow template to create. + This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -201,7 +232,7 @@ async def create_workflow_template( sent along with the request as metadata. Returns: - ~.workflow_templates.WorkflowTemplate: + google.cloud.dataproc_v1.types.WorkflowTemplate: A Dataproc workflow template resource. 
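# Hedged usage sketch for create_workflow_template (sync client shown;
# the async client documented above mirrors it). The parent string
# follows the regional format from the docstring; the template is a
# deliberately skeletal placeholder, since a real one also needs
# placement and at least one job.
from google.cloud import dataproc_v1

client = dataproc_v1.WorkflowTemplateServiceClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
template = dataproc_v1.WorkflowTemplate(id="my-template")
response = client.create_workflow_template(
    parent="projects/my-project/regions/us-central1",
    template=template,
)
print(response.name)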
@@ -235,6 +266,7 @@ async def create_workflow_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -266,7 +298,7 @@ async def get_workflow_template( specifying optional version parameter. Args: - request (:class:`~.workflow_templates.GetWorkflowTemplateRequest`): + request (:class:`google.cloud.dataproc_v1.types.GetWorkflowTemplateRequest`): The request object. A request to fetch a workflow template. name (:class:`str`): @@ -283,6 +315,7 @@ async def get_workflow_template( resource name of the template has the following format: ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -294,7 +327,7 @@ async def get_workflow_template( sent along with the request as metadata. Returns: - ~.workflow_templates.WorkflowTemplate: + google.cloud.dataproc_v1.types.WorkflowTemplate: A Dataproc workflow template resource. @@ -330,6 +363,7 @@ async def get_workflow_template( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -382,7 +416,7 @@ async def instantiate_workflow_template( be [Empty][google.protobuf.Empty]. Args: - request (:class:`~.workflow_templates.InstantiateWorkflowTemplateRequest`): + request (:class:`google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest`): The request object. A request to instantiate a workflow template. name (:class:`str`): @@ -401,14 +435,16 @@ async def instantiate_workflow_template( the resource name of the template has the following format: ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - parameters (:class:`Sequence[~.workflow_templates.InstantiateWorkflowTemplateRequest.ParametersEntry]`): + parameters (:class:`Sequence[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest.ParametersEntry]`): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 100 characters. + This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -420,24 +456,22 @@ async def instantiate_workflow_template( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. 
+ The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -470,6 +504,7 @@ async def instantiate_workflow_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -533,7 +568,7 @@ async def instantiate_inline_workflow_template( be [Empty][google.protobuf.Empty]. Args: - request (:class:`~.workflow_templates.InstantiateInlineWorkflowTemplateRequest`): + request (:class:`google.cloud.dataproc_v1.types.InstantiateInlineWorkflowTemplateRequest`): The request object. A request to instantiate an inline workflow template. parent (:class:`str`): @@ -551,12 +586,14 @@ async def instantiate_inline_workflow_template( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - template (:class:`~.workflow_templates.WorkflowTemplate`): + template (:class:`google.cloud.dataproc_v1.types.WorkflowTemplate`): Required. The workflow template to instantiate. + This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -568,24 +605,22 @@ async def instantiate_inline_workflow_template( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -617,6 +652,7 @@ async def instantiate_inline_workflow_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -656,14 +692,15 @@ async def update_workflow_template( server version. Args: - request (:class:`~.workflow_templates.UpdateWorkflowTemplateRequest`): + request (:class:`google.cloud.dataproc_v1.types.UpdateWorkflowTemplateRequest`): The request object. A request to update a workflow template. - template (:class:`~.workflow_templates.WorkflowTemplate`): + template (:class:`google.cloud.dataproc_v1.types.WorkflowTemplate`): Required. The updated workflow template. The ``template.version`` field must match the current version. + This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -675,7 +712,7 @@ async def update_workflow_template( sent along with the request as metadata. 
Returns: - ~.workflow_templates.WorkflowTemplate: + google.cloud.dataproc_v1.types.WorkflowTemplate: A Dataproc workflow template resource. @@ -707,6 +744,7 @@ async def update_workflow_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -739,7 +777,7 @@ async def list_workflow_templates( the request. Args: - request (:class:`~.workflow_templates.ListWorkflowTemplatesRequest`): + request (:class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest`): The request object. A request to list workflow templates in a project. parent (:class:`str`): @@ -755,6 +793,7 @@ async def list_workflow_templates( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -766,7 +805,7 @@ async def list_workflow_templates( sent along with the request as metadata. Returns: - ~.pagers.ListWorkflowTemplatesAsyncPager: + google.cloud.dataproc_v1.services.workflow_template_service.pagers.ListWorkflowTemplatesAsyncPager: A response to a request to list workflow templates in a project. Iterating over this object will yield @@ -805,6 +844,7 @@ async def list_workflow_templates( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -841,7 +881,7 @@ async def delete_workflow_template( rogress workflows. Args: - request (:class:`~.workflow_templates.DeleteWorkflowTemplateRequest`): + request (:class:`google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest`): The request object. A request to delete a workflow template. Currently started workflows will remain running. @@ -860,6 +900,7 @@ async def delete_workflow_template( the resource name of the template has the following format: ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -897,6 +938,7 @@ async def delete_workflow_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/google/cloud/dataproc_v1/services/workflow_template_service/client.py index 73a5626b..bb0be312 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/client.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/client.py @@ -118,6 +118,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkflowTemplateServiceClient: The constructed client. 
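# The deadline=600.0 values threaded through these hunks bound the total
# retry window, on top of the per-call default_timeout. A standalone
# reconstruction of the same policy with google.api_core (assuming a
# Retry signature that accepts the deadline kwarg, as this diff does):
from google.api_core import exceptions
from google.api_core import retry as retries

retry_policy = retries.Retry(
    initial=0.1,      # first backoff, in seconds
    maximum=60.0,     # cap on any single backoff
    multiplier=1.3,   # exponential growth factor between attempts
    predicate=retries.if_exception_type(exceptions.ServiceUnavailable),
    deadline=600.0,   # stop retrying 600s after the first attempt
)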
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -130,7 +146,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + WorkflowTemplateServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -240,10 +256,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.WorkflowTemplateServiceTransport]): The + transport (Union[str, WorkflowTemplateServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -279,21 +295,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -336,7 +348,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -354,10 +366,10 @@ def create_workflow_template( r"""Creates new workflow template. Args: - request (:class:`~.workflow_templates.CreateWorkflowTemplateRequest`): + request (google.cloud.dataproc_v1.types.CreateWorkflowTemplateRequest): The request object. A request to create a workflow template. - parent (:class:`str`): + parent (str): Required. The resource name of the region or location, as described in https://cloud.google.com/apis/design/resource_names. @@ -370,12 +382,14 @@ def create_workflow_template( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - template (:class:`~.workflow_templates.WorkflowTemplate`): + template (google.cloud.dataproc_v1.types.WorkflowTemplate): Required. The Dataproc workflow template to create. 
+ This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -387,7 +401,7 @@ def create_workflow_template( sent along with the request as metadata. Returns: - ~.workflow_templates.WorkflowTemplate: + google.cloud.dataproc_v1.types.WorkflowTemplate: A Dataproc workflow template resource. @@ -447,10 +461,10 @@ def get_workflow_template( specifying optional version parameter. Args: - request (:class:`~.workflow_templates.GetWorkflowTemplateRequest`): + request (google.cloud.dataproc_v1.types.GetWorkflowTemplateRequest): The request object. A request to fetch a workflow template. - name (:class:`str`): + name (str): Required. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. @@ -464,6 +478,7 @@ def get_workflow_template( resource name of the template has the following format: ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -475,7 +490,7 @@ def get_workflow_template( sent along with the request as metadata. Returns: - ~.workflow_templates.WorkflowTemplate: + google.cloud.dataproc_v1.types.WorkflowTemplate: A Dataproc workflow template resource. @@ -554,10 +569,10 @@ def instantiate_workflow_template( be [Empty][google.protobuf.Empty]. Args: - request (:class:`~.workflow_templates.InstantiateWorkflowTemplateRequest`): + request (google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest): The request object. A request to instantiate a workflow template. - name (:class:`str`): + name (str): Required. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. @@ -573,14 +588,16 @@ def instantiate_workflow_template( the resource name of the template has the following format: ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - parameters (:class:`Sequence[~.workflow_templates.InstantiateWorkflowTemplateRequest.ParametersEntry]`): + parameters (Sequence[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest.ParametersEntry]): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 100 characters. + This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -592,24 +609,22 @@ def instantiate_workflow_template( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -636,9 +651,8 @@ def instantiate_workflow_template( if name is not None: request.name = name - - if parameters: - request.parameters.update(parameters) + if parameters is not None: + request.parameters = parameters # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -704,10 +718,10 @@ def instantiate_inline_workflow_template( be [Empty][google.protobuf.Empty]. Args: - request (:class:`~.workflow_templates.InstantiateInlineWorkflowTemplateRequest`): + request (google.cloud.dataproc_v1.types.InstantiateInlineWorkflowTemplateRequest): The request object. A request to instantiate an inline workflow template. - parent (:class:`str`): + parent (str): Required. The resource name of the region or location, as described in https://cloud.google.com/apis/design/resource_names. @@ -722,12 +736,14 @@ def instantiate_inline_workflow_template( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - template (:class:`~.workflow_templates.WorkflowTemplate`): + template (google.cloud.dataproc_v1.types.WorkflowTemplate): Required. The workflow template to instantiate. + This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -739,24 +755,22 @@ def instantiate_inline_workflow_template( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -828,14 +842,15 @@ def update_workflow_template( server version. Args: - request (:class:`~.workflow_templates.UpdateWorkflowTemplateRequest`): + request (google.cloud.dataproc_v1.types.UpdateWorkflowTemplateRequest): The request object. A request to update a workflow template. - template (:class:`~.workflow_templates.WorkflowTemplate`): + template (google.cloud.dataproc_v1.types.WorkflowTemplate): Required. The updated workflow template. The ``template.version`` field must match the current version. 
+ This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -847,7 +862,7 @@ def update_workflow_template( sent along with the request as metadata. Returns: - ~.workflow_templates.WorkflowTemplate: + google.cloud.dataproc_v1.types.WorkflowTemplate: A Dataproc workflow template resource. @@ -906,10 +921,10 @@ def list_workflow_templates( the request. Args: - request (:class:`~.workflow_templates.ListWorkflowTemplatesRequest`): + request (google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest): The request object. A request to list workflow templates in a project. - parent (:class:`str`): + parent (str): Required. The resource name of the region or location, as described in https://cloud.google.com/apis/design/resource_names. @@ -922,6 +937,7 @@ def list_workflow_templates( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -933,7 +949,7 @@ def list_workflow_templates( sent along with the request as metadata. Returns: - ~.pagers.ListWorkflowTemplatesPager: + google.cloud.dataproc_v1.services.workflow_template_service.pagers.ListWorkflowTemplatesPager: A response to a request to list workflow templates in a project. Iterating over this object will yield @@ -999,11 +1015,11 @@ def delete_workflow_template( rogress workflows. Args: - request (:class:`~.workflow_templates.DeleteWorkflowTemplateRequest`): + request (google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest): The request object. A request to delete a workflow template. Currently started workflows will remain running. - name (:class:`str`): + name (str): Required. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. @@ -1018,6 +1034,7 @@ def delete_workflow_template( the resource name of the template has the following format: ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py b/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py index 86a35f48..90fa03f2 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.dataproc_v1.types import workflow_templates @@ -24,7 +33,7 @@ class ListWorkflowTemplatesPager: """A pager for iterating through ``list_workflow_templates`` requests. This class thinly wraps an initial - :class:`~.workflow_templates.ListWorkflowTemplatesResponse` object, and + :class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse` object, and provides an ``__iter__`` method to iterate through its ``templates`` field. @@ -33,7 +42,7 @@ class ListWorkflowTemplatesPager: through the ``templates`` field on the corresponding responses. 
- All the usual :class:`~.workflow_templates.ListWorkflowTemplatesResponse` + All the usual :class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.workflow_templates.ListWorkflowTemplatesRequest`): + request (google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest): The initial request object. - response (:class:`~.workflow_templates.ListWorkflowTemplatesResponse`): + response (google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListWorkflowTemplatesAsyncPager: """A pager for iterating through ``list_workflow_templates`` requests. This class thinly wraps an initial - :class:`~.workflow_templates.ListWorkflowTemplatesResponse` object, and + :class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse` object, and provides an ``__aiter__`` method to iterate through its ``templates`` field. @@ -95,7 +104,7 @@ class ListWorkflowTemplatesAsyncPager: through the ``templates`` field on the corresponding responses. - All the usual :class:`~.workflow_templates.ListWorkflowTemplatesResponse` + All the usual :class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -115,9 +124,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.workflow_templates.ListWorkflowTemplatesRequest`): + request (google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest): The initial request object. - response (:class:`~.workflow_templates.ListWorkflowTemplatesResponse`): + response (google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py index 967002f5..bded001b 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py @@ -70,10 +70,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -81,6 +81,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. 
+ self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -90,20 +93,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -114,6 +114,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -129,6 +130,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -140,6 +142,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -151,6 +154,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -162,6 +166,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -177,6 +182,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -188,6 +194,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py index 98d84293..e2bbf535 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py @@ -61,6 +61,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -91,6 +92,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -105,72 +110,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. 
+ if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -178,18 +172,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -203,7 +187,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py index 1024ab1b..1f93da89 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py @@ -65,7 +65,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -105,6 +105,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -136,12 +137,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. 
Raises: @@ -150,72 +155,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -223,18 +217,8 @@ def __init__( ], ) - # Run the base constructor. 
- super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/dataproc_v1/types/__init__.py b/google/cloud/dataproc_v1/types/__init__.py index 3c02d690..d79923a4 100644 --- a/google/cloud/dataproc_v1/types/__init__.py +++ b/google/cloud/dataproc_v1/types/__init__.py @@ -19,179 +19,179 @@ AutoscalingPolicy, BasicAutoscalingAlgorithm, BasicYarnAutoscalingConfig, - InstanceGroupAutoscalingPolicyConfig, CreateAutoscalingPolicyRequest, - GetAutoscalingPolicyRequest, - UpdateAutoscalingPolicyRequest, DeleteAutoscalingPolicyRequest, + GetAutoscalingPolicyRequest, + InstanceGroupAutoscalingPolicyConfig, ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse, + UpdateAutoscalingPolicyRequest, ) from .clusters import ( + AcceleratorConfig, + AutoscalingConfig, Cluster, ClusterConfig, - EndpointConfig, - AutoscalingConfig, + ClusterMetrics, + ClusterStatus, + CreateClusterRequest, + DeleteClusterRequest, + DiagnoseClusterRequest, + DiagnoseClusterResults, + DiskConfig, EncryptionConfig, + EndpointConfig, GceClusterConfig, + GetClusterRequest, InstanceGroupConfig, - ManagedGroupConfig, - AcceleratorConfig, - DiskConfig, - NodeInitializationAction, - ClusterStatus, - SecurityConfig, KerberosConfig, - SoftwareConfig, LifecycleConfig, - ClusterMetrics, - CreateClusterRequest, - UpdateClusterRequest, - DeleteClusterRequest, - GetClusterRequest, ListClustersRequest, ListClustersResponse, - DiagnoseClusterRequest, - DiagnoseClusterResults, + ManagedGroupConfig, + NodeInitializationAction, ReservationAffinity, + SecurityConfig, + SoftwareConfig, + UpdateClusterRequest, ) from .jobs import ( - LoggingConfig, + CancelJobRequest, + DeleteJobRequest, + GetJobRequest, HadoopJob, - SparkJob, - PySparkJob, - QueryList, HiveJob, - SparkSqlJob, - PigJob, - SparkRJob, - PrestoJob, + Job, + JobMetadata, JobPlacement, - JobStatus, JobReference, - YarnApplication, - Job, JobScheduling, - SubmitJobRequest, - JobMetadata, - GetJobRequest, + JobStatus, ListJobsRequest, - UpdateJobRequest, ListJobsResponse, - CancelJobRequest, - DeleteJobRequest, + LoggingConfig, + PigJob, + PrestoJob, + PySparkJob, + QueryList, + SparkJob, + SparkRJob, + SparkSqlJob, + SubmitJobRequest, + UpdateJobRequest, + YarnApplication, ) from .operations import ( - ClusterOperationStatus, ClusterOperationMetadata, + ClusterOperationStatus, ) from .workflow_templates import ( - WorkflowTemplate, - WorkflowTemplatePlacement, - ManagedCluster, + ClusterOperation, ClusterSelector, + CreateWorkflowTemplateRequest, + DeleteWorkflowTemplateRequest, + GetWorkflowTemplateRequest, + InstantiateInlineWorkflowTemplateRequest, + InstantiateWorkflowTemplateRequest, + ListWorkflowTemplatesRequest, + ListWorkflowTemplatesResponse, + ManagedCluster, OrderedJob, - TemplateParameter, ParameterValidation, RegexValidation, + TemplateParameter, + UpdateWorkflowTemplateRequest, ValueValidation, - WorkflowMetadata, - ClusterOperation, WorkflowGraph, + WorkflowMetadata, WorkflowNode, - CreateWorkflowTemplateRequest, - GetWorkflowTemplateRequest, - InstantiateWorkflowTemplateRequest, - InstantiateInlineWorkflowTemplateRequest, - UpdateWorkflowTemplateRequest, - ListWorkflowTemplatesRequest, - 
ListWorkflowTemplatesResponse, - DeleteWorkflowTemplateRequest, + WorkflowTemplate, + WorkflowTemplatePlacement, ) __all__ = ( "AutoscalingPolicy", "BasicAutoscalingAlgorithm", "BasicYarnAutoscalingConfig", - "InstanceGroupAutoscalingPolicyConfig", "CreateAutoscalingPolicyRequest", - "GetAutoscalingPolicyRequest", - "UpdateAutoscalingPolicyRequest", "DeleteAutoscalingPolicyRequest", + "GetAutoscalingPolicyRequest", + "InstanceGroupAutoscalingPolicyConfig", "ListAutoscalingPoliciesRequest", "ListAutoscalingPoliciesResponse", - "Component", + "UpdateAutoscalingPolicyRequest", + "AcceleratorConfig", + "AutoscalingConfig", "Cluster", "ClusterConfig", - "EndpointConfig", - "AutoscalingConfig", + "ClusterMetrics", + "ClusterStatus", + "CreateClusterRequest", + "DeleteClusterRequest", + "DiagnoseClusterRequest", + "DiagnoseClusterResults", + "DiskConfig", "EncryptionConfig", + "EndpointConfig", "GceClusterConfig", + "GetClusterRequest", "InstanceGroupConfig", - "ManagedGroupConfig", - "AcceleratorConfig", - "DiskConfig", - "NodeInitializationAction", - "ClusterStatus", - "SecurityConfig", "KerberosConfig", - "SoftwareConfig", "LifecycleConfig", - "ClusterMetrics", - "CreateClusterRequest", - "UpdateClusterRequest", - "DeleteClusterRequest", - "GetClusterRequest", "ListClustersRequest", "ListClustersResponse", - "DiagnoseClusterRequest", - "DiagnoseClusterResults", + "ManagedGroupConfig", + "NodeInitializationAction", "ReservationAffinity", - "LoggingConfig", + "SecurityConfig", + "SoftwareConfig", + "UpdateClusterRequest", + "CancelJobRequest", + "DeleteJobRequest", + "GetJobRequest", "HadoopJob", - "SparkJob", - "PySparkJob", - "QueryList", "HiveJob", - "SparkSqlJob", - "PigJob", - "SparkRJob", - "PrestoJob", + "Job", + "JobMetadata", "JobPlacement", - "JobStatus", "JobReference", - "YarnApplication", - "Job", "JobScheduling", - "SubmitJobRequest", - "JobMetadata", - "GetJobRequest", + "JobStatus", "ListJobsRequest", - "UpdateJobRequest", "ListJobsResponse", - "CancelJobRequest", - "DeleteJobRequest", - "ClusterOperationStatus", + "LoggingConfig", + "PigJob", + "PrestoJob", + "PySparkJob", + "QueryList", + "SparkJob", + "SparkRJob", + "SparkSqlJob", + "SubmitJobRequest", + "UpdateJobRequest", + "YarnApplication", "ClusterOperationMetadata", - "WorkflowTemplate", - "WorkflowTemplatePlacement", - "ManagedCluster", + "ClusterOperationStatus", + "Component", + "ClusterOperation", "ClusterSelector", + "CreateWorkflowTemplateRequest", + "DeleteWorkflowTemplateRequest", + "GetWorkflowTemplateRequest", + "InstantiateInlineWorkflowTemplateRequest", + "InstantiateWorkflowTemplateRequest", + "ListWorkflowTemplatesRequest", + "ListWorkflowTemplatesResponse", + "ManagedCluster", "OrderedJob", - "TemplateParameter", "ParameterValidation", "RegexValidation", + "TemplateParameter", + "UpdateWorkflowTemplateRequest", "ValueValidation", - "WorkflowMetadata", - "ClusterOperation", "WorkflowGraph", + "WorkflowMetadata", "WorkflowNode", - "CreateWorkflowTemplateRequest", - "GetWorkflowTemplateRequest", - "InstantiateWorkflowTemplateRequest", - "InstantiateInlineWorkflowTemplateRequest", - "UpdateWorkflowTemplateRequest", - "ListWorkflowTemplatesRequest", - "ListWorkflowTemplatesResponse", - "DeleteWorkflowTemplateRequest", + "WorkflowTemplate", + "WorkflowTemplatePlacement", ) diff --git a/google/cloud/dataproc_v1/types/autoscaling_policies.py b/google/cloud/dataproc_v1/types/autoscaling_policies.py index edd3806a..7fa0779f 100644 --- a/google/cloud/dataproc_v1/types/autoscaling_policies.py +++ 
b/google/cloud/dataproc_v1/types/autoscaling_policies.py @@ -62,12 +62,12 @@ class AutoscalingPolicy(proto.Message): - For ``projects.locations.autoscalingPolicies``, the resource name of the policy has the following format: ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` - basic_algorithm (~.autoscaling_policies.BasicAutoscalingAlgorithm): + basic_algorithm (google.cloud.dataproc_v1.types.BasicAutoscalingAlgorithm): - worker_config (~.autoscaling_policies.InstanceGroupAutoscalingPolicyConfig): + worker_config (google.cloud.dataproc_v1.types.InstanceGroupAutoscalingPolicyConfig): Required. Describes how the autoscaler will operate for primary workers. - secondary_worker_config (~.autoscaling_policies.InstanceGroupAutoscalingPolicyConfig): + secondary_worker_config (google.cloud.dataproc_v1.types.InstanceGroupAutoscalingPolicyConfig): Optional. Describes how the autoscaler will operate for secondary workers. """ @@ -93,9 +93,9 @@ class BasicAutoscalingAlgorithm(proto.Message): r"""Basic algorithm for autoscaling. Attributes: - yarn_config (~.autoscaling_policies.BasicYarnAutoscalingConfig): + yarn_config (google.cloud.dataproc_v1.types.BasicYarnAutoscalingConfig): Required. YARN autoscaling configuration. - cooldown_period (~.duration.Duration): + cooldown_period (google.protobuf.duration_pb2.Duration): Optional. Duration between scaling events. A scaling period starts after the update operation from the previous event has completed. @@ -114,7 +114,7 @@ class BasicYarnAutoscalingConfig(proto.Message): r"""Basic autoscaling configurations for YARN. Attributes: - graceful_decommission_timeout (~.duration.Duration): + graceful_decommission_timeout (google.protobuf.duration_pb2.Duration): Required. Timeout for YARN graceful decommissioning of Node Managers. Specifies the duration to wait for jobs to complete before forcefully removing workers (and potentially @@ -244,7 +244,7 @@ class CreateAutoscalingPolicyRequest(proto.Message): - For ``projects.locations.autoscalingPolicies.create``, the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` - policy (~.autoscaling_policies.AutoscalingPolicy): + policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): Required. The autoscaling policy to create. """ @@ -278,7 +278,7 @@ class UpdateAutoscalingPolicyRequest(proto.Message): r"""A request to update an autoscaling policy. Attributes: - policy (~.autoscaling_policies.AutoscalingPolicy): + policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): Required. The updated autoscaling policy. """ @@ -346,7 +346,7 @@ class ListAutoscalingPoliciesResponse(proto.Message): project. Attributes: - policies (Sequence[~.autoscaling_policies.AutoscalingPolicy]): + policies (Sequence[google.cloud.dataproc_v1.types.AutoscalingPolicy]): Output only. Autoscaling policies list. next_page_token (str): Output only. This token is included in the diff --git a/google/cloud/dataproc_v1/types/clusters.py b/google/cloud/dataproc_v1/types/clusters.py index 9cb83872..008b4866 100644 --- a/google/cloud/dataproc_v1/types/clusters.py +++ b/google/cloud/dataproc_v1/types/clusters.py @@ -69,11 +69,11 @@ class Cluster(proto.Message): Required. The cluster name. Cluster names within a project must be unique. Names of deleted clusters can be reused. - config (~.gcd_clusters.ClusterConfig): + config (google.cloud.dataproc_v1.types.ClusterConfig): Required. The cluster config. 
Note that Dataproc may set default values, and values may change when clusters are updated. - labels (Sequence[~.gcd_clusters.Cluster.LabelsEntry]): + labels (Sequence[google.cloud.dataproc_v1.types.Cluster.LabelsEntry]): Optional. The labels to associate with this cluster. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. @@ -81,15 +81,15 @@ class Cluster(proto.Message): 1 to 63 characters, and must conform to `RFC 1035 `__. No more than 32 labels can be associated with a cluster. - status (~.gcd_clusters.ClusterStatus): + status (google.cloud.dataproc_v1.types.ClusterStatus): Output only. Cluster status. - status_history (Sequence[~.gcd_clusters.ClusterStatus]): + status_history (Sequence[google.cloud.dataproc_v1.types.ClusterStatus]): Output only. The previous cluster status. cluster_uuid (str): Output only. A cluster UUID (Unique Universal Identifier). Dataproc generates this value when it creates the cluster. - metrics (~.gcd_clusters.ClusterMetrics): + metrics (google.cloud.dataproc_v1.types.ClusterMetrics): Output only. Contains cluster daemon metrics such as HDFS and YARN stats. @@ -142,22 +142,22 @@ class ClusterConfig(proto.Message): this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket. - gce_cluster_config (~.gcd_clusters.GceClusterConfig): + gce_cluster_config (google.cloud.dataproc_v1.types.GceClusterConfig): Optional. The shared Compute Engine config settings for all instances in a cluster. - master_config (~.gcd_clusters.InstanceGroupConfig): + master_config (google.cloud.dataproc_v1.types.InstanceGroupConfig): Optional. The Compute Engine config settings for the master instance in a cluster. - worker_config (~.gcd_clusters.InstanceGroupConfig): + worker_config (google.cloud.dataproc_v1.types.InstanceGroupConfig): Optional. The Compute Engine config settings for worker instances in a cluster. - secondary_worker_config (~.gcd_clusters.InstanceGroupConfig): + secondary_worker_config (google.cloud.dataproc_v1.types.InstanceGroupConfig): Optional. The Compute Engine config settings for additional worker instances in a cluster. - software_config (~.gcd_clusters.SoftwareConfig): + software_config (google.cloud.dataproc_v1.types.SoftwareConfig): Optional. The config settings for software inside the cluster. - initialization_actions (Sequence[~.gcd_clusters.NodeInitializationAction]): + initialization_actions (Sequence[google.cloud.dataproc_v1.types.NodeInitializationAction]): Optional. Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's ``role`` metadata to run @@ -173,18 +173,18 @@ class ClusterConfig(proto.Message): else ... worker specific actions ... fi - encryption_config (~.gcd_clusters.EncryptionConfig): + encryption_config (google.cloud.dataproc_v1.types.EncryptionConfig): Optional. Encryption settings for the cluster. - autoscaling_config (~.gcd_clusters.AutoscalingConfig): + autoscaling_config (google.cloud.dataproc_v1.types.AutoscalingConfig): Optional. Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset. - security_config (~.gcd_clusters.SecurityConfig): + security_config (google.cloud.dataproc_v1.types.SecurityConfig): Optional. Security settings for the cluster. - lifecycle_config (~.gcd_clusters.LifecycleConfig): + lifecycle_config (google.cloud.dataproc_v1.types.LifecycleConfig): Optional. 
Lifecycle setting for the cluster. - endpoint_config (~.gcd_clusters.EndpointConfig): + endpoint_config (google.cloud.dataproc_v1.types.EndpointConfig): Optional. Port/endpoint configuration for this cluster """ @@ -232,7 +232,7 @@ class EndpointConfig(proto.Message): r"""Endpoint config for this cluster Attributes: - http_ports (Sequence[~.gcd_clusters.EndpointConfig.HttpPortsEntry]): + http_ports (Sequence[google.cloud.dataproc_v1.types.EndpointConfig.HttpPortsEntry]): Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true. enable_http_port_access (bool): @@ -360,11 +360,11 @@ class GceClusterConfig(proto.Message): The Compute Engine tags to add to all instances (see `Tagging instances `__). - metadata (Sequence[~.gcd_clusters.GceClusterConfig.MetadataEntry]): + metadata (Sequence[google.cloud.dataproc_v1.types.GceClusterConfig.MetadataEntry]): The Compute Engine metadata entries to add to all instances (see `Project and instance metadata `__). - reservation_affinity (~.gcd_clusters.ReservationAffinity): + reservation_affinity (google.cloud.dataproc_v1.types.ReservationAffinity): Optional. Reservation Affinity for consuming Zonal reservation. """ @@ -438,12 +438,12 @@ class InstanceGroupConfig(proto.Message): Placement `__ feature, you must use the short name of the machine type resource, for example, ``n1-standard-2``. - disk_config (~.gcd_clusters.DiskConfig): + disk_config (google.cloud.dataproc_v1.types.DiskConfig): Optional. Disk option config settings. is_preemptible (bool): Output only. Specifies that this instance group contains preemptible instances. - preemptibility (~.gcd_clusters.InstanceGroupConfig.Preemptibility): + preemptibility (google.cloud.dataproc_v1.types.InstanceGroupConfig.Preemptibility): Optional. Specifies the preemptibility of the instance group. @@ -452,12 +452,12 @@ class InstanceGroupConfig(proto.Message): The default value for secondary instances is ``PREEMPTIBLE``. - managed_group_config (~.gcd_clusters.ManagedGroupConfig): + managed_group_config (google.cloud.dataproc_v1.types.ManagedGroupConfig): Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups. - accelerators (Sequence[~.gcd_clusters.AcceleratorConfig]): + accelerators (Sequence[google.cloud.dataproc_v1.types.AcceleratorConfig]): Optional. The Compute Engine accelerator configuration for these instances. min_cpu_platform (str): @@ -588,7 +588,7 @@ class NodeInitializationAction(proto.Message): executable_file (str): Required. Cloud Storage URI of executable file. - execution_timeout (~.duration.Duration): + execution_timeout (google.protobuf.duration_pb2.Duration): Optional. Amount of time executable has to complete. Default is 10 minutes (see JSON representation of `Duration `__). @@ -608,16 +608,16 @@ class ClusterStatus(proto.Message): r"""The status of a cluster and its instances. Attributes: - state (~.gcd_clusters.ClusterStatus.State): + state (google.cloud.dataproc_v1.types.ClusterStatus.State): Output only. The cluster's state. detail (str): Optional. Output only. Details of cluster's state. - state_start_time (~.timestamp.Timestamp): + state_start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when this state was entered (see JSON representation of `Timestamp `__). - substate (~.gcd_clusters.ClusterStatus.Substate): + substate (google.cloud.dataproc_v1.types.ClusterStatus.Substate): Output only. 
Additional state information that includes status reported by the agent. """ @@ -652,7 +652,7 @@ class SecurityConfig(proto.Message): r"""Security related configuration, including Kerberos. Attributes: - kerberos_config (~.gcd_clusters.KerberosConfig): + kerberos_config (google.cloud.dataproc_v1.types.KerberosConfig): Kerberos related configuration. """ @@ -778,7 +778,7 @@ class SoftwareConfig(proto.Message): "1.2.29"), or the `"preview" version `__. If unspecified, it defaults to the latest Debian version. - properties (Sequence[~.gcd_clusters.SoftwareConfig.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1.types.SoftwareConfig.PropertiesEntry]): Optional. The properties to set on daemon config files. Property keys are specified in ``prefix:property`` format, @@ -797,7 +797,7 @@ class SoftwareConfig(proto.Message): For more information, see `Cluster properties `__. - optional_components (Sequence[~.shared.Component]): + optional_components (Sequence[google.cloud.dataproc_v1.types.Component]): Optional. The set of components to activate on the cluster. """ @@ -815,24 +815,24 @@ class LifecycleConfig(proto.Message): r"""Specifies the cluster auto-delete schedule configuration. Attributes: - idle_delete_ttl (~.duration.Duration): + idle_delete_ttl (google.protobuf.duration_pb2.Duration): Optional. The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of `Duration `__. - auto_delete_time (~.timestamp.Timestamp): + auto_delete_time (google.protobuf.timestamp_pb2.Timestamp): Optional. The time when cluster will be auto-deleted (see JSON representation of `Timestamp `__). - auto_delete_ttl (~.duration.Duration): + auto_delete_ttl (google.protobuf.duration_pb2.Duration): Optional. The lifetime duration of cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of `Duration `__). - idle_start_time (~.timestamp.Timestamp): + idle_start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when cluster became idle (most recent job finished) and became eligible for deletion due to idleness (see JSON representation of @@ -859,9 +859,9 @@ class ClusterMetrics(proto.Message): only. It may be changed before final release. Attributes: - hdfs_metrics (Sequence[~.gcd_clusters.ClusterMetrics.HdfsMetricsEntry]): + hdfs_metrics (Sequence[google.cloud.dataproc_v1.types.ClusterMetrics.HdfsMetricsEntry]): The HDFS metrics. - yarn_metrics (Sequence[~.gcd_clusters.ClusterMetrics.YarnMetricsEntry]): + yarn_metrics (Sequence[google.cloud.dataproc_v1.types.ClusterMetrics.YarnMetricsEntry]): The YARN metrics. """ @@ -880,7 +880,7 @@ class CreateClusterRequest(proto.Message): region (str): Required. The Dataproc region in which to handle the request. - cluster (~.gcd_clusters.Cluster): + cluster (google.cloud.dataproc_v1.types.Cluster): Required. The cluster to create. request_id (str): Optional. A unique id used to identify the request. If the @@ -920,9 +920,9 @@ class UpdateClusterRequest(proto.Message): handle the request. cluster_name (str): Required. The cluster name. - cluster (~.gcd_clusters.Cluster): + cluster (google.cloud.dataproc_v1.types.Cluster): Required. The changes to the cluster. - graceful_decommission_timeout (~.duration.Duration): + graceful_decommission_timeout (google.protobuf.duration_pb2.Duration): Optional. 
Timeout for graceful YARN decomissioning. Graceful decommissioning allows removing nodes from the cluster without interrupting jobs in progress. Timeout specifies how @@ -934,7 +934,7 @@ class UpdateClusterRequest(proto.Message): `Duration `__). Only supported on Dataproc image versions 1.2 and higher. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Specifies the path, relative to ``Cluster``, of the field to update. For example, to change the number of workers in a cluster to 5, the ``update_mask`` parameter @@ -1148,7 +1148,7 @@ class ListClustersResponse(proto.Message): r"""The list of all clusters in a project. Attributes: - clusters (Sequence[~.gcd_clusters.Cluster]): + clusters (Sequence[google.cloud.dataproc_v1.types.Cluster]): Output only. The clusters in the project. next_page_token (str): Output only. This token is included in the response if there @@ -1205,7 +1205,7 @@ class ReservationAffinity(proto.Message): r"""Reservation Affinity for consuming Zonal reservation. Attributes: - consume_reservation_type (~.gcd_clusters.ReservationAffinity.Type): + consume_reservation_type (google.cloud.dataproc_v1.types.ReservationAffinity.Type): Optional. Type of reservation to consume key (str): Optional. Corresponds to the label key of diff --git a/google/cloud/dataproc_v1/types/jobs.py b/google/cloud/dataproc_v1/types/jobs.py index 84c0e3f6..cfb19555 100644 --- a/google/cloud/dataproc_v1/types/jobs.py +++ b/google/cloud/dataproc_v1/types/jobs.py @@ -57,7 +57,7 @@ class LoggingConfig(proto.Message): r"""The runtime logging config of the job. Attributes: - driver_log_levels (Sequence[~.gcd_jobs.LoggingConfig.DriverLogLevelsEntry]): + driver_log_levels (Sequence[google.cloud.dataproc_v1.types.LoggingConfig.DriverLogLevelsEntry]): The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: @@ -120,13 +120,13 @@ class HadoopJob(proto.Message): extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip. - properties (Sequence[~.gcd_jobs.HadoopJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1.types.HadoopJob.PropertiesEntry]): Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code. - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -177,14 +177,14 @@ class SparkJob(proto.Message): extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. - properties (Sequence[~.gcd_jobs.SparkJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1.types.SparkJob.PropertiesEntry]): Optional. A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -236,7 +236,7 @@ class PySparkJob(proto.Message): extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. 
- properties (Sequence[~.gcd_jobs.PySparkJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1.types.PySparkJob.PropertiesEntry]): Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc @@ -244,7 +244,7 @@ class PySparkJob(proto.Message): set in /etc/spark/conf/spark-defaults.conf and classes in user code. - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -301,16 +301,16 @@ class HiveJob(proto.Message): query_file_uri (str): The HCFS URI of the script that contains Hive queries. - query_list (~.gcd_jobs.QueryList): + query_list (google.cloud.dataproc_v1.types.QueryList): A list of queries. continue_on_failure (bool): Optional. Whether to continue executing queries if a query fails. The default value is ``false``. Setting to ``true`` can be useful when executing independent parallel queries. - script_variables (Sequence[~.gcd_jobs.HiveJob.ScriptVariablesEntry]): + script_variables (Sequence[google.cloud.dataproc_v1.types.HiveJob.ScriptVariablesEntry]): Optional. Mapping of query variable names to values (equivalent to the Hive command: ``SET name="value";``). - properties (Sequence[~.gcd_jobs.HiveJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1.types.HiveJob.PropertiesEntry]): Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties @@ -346,13 +346,13 @@ class SparkSqlJob(proto.Message): query_file_uri (str): The HCFS URI of the script that contains SQL queries. - query_list (~.gcd_jobs.QueryList): + query_list (google.cloud.dataproc_v1.types.QueryList): A list of queries. - script_variables (Sequence[~.gcd_jobs.SparkSqlJob.ScriptVariablesEntry]): + script_variables (Sequence[google.cloud.dataproc_v1.types.SparkSqlJob.ScriptVariablesEntry]): Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET ``name="value";``). - properties (Sequence[~.gcd_jobs.SparkSqlJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1.types.SparkSqlJob.PropertiesEntry]): Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the @@ -360,7 +360,7 @@ class SparkSqlJob(proto.Message): jar_file_uris (Sequence[str]): Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -388,16 +388,16 @@ class PigJob(proto.Message): query_file_uri (str): The HCFS URI of the script that contains the Pig queries. - query_list (~.gcd_jobs.QueryList): + query_list (google.cloud.dataproc_v1.types.QueryList): A list of queries. continue_on_failure (bool): Optional. Whether to continue executing queries if a query fails. The default value is ``false``. Setting to ``true`` can be useful when executing independent parallel queries. - script_variables (Sequence[~.gcd_jobs.PigJob.ScriptVariablesEntry]): + script_variables (Sequence[google.cloud.dataproc_v1.types.PigJob.ScriptVariablesEntry]): Optional. Mapping of query variable names to values (equivalent to the Pig command: ``name=[value]``). 
- properties (Sequence[~.gcd_jobs.PigJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1.types.PigJob.PropertiesEntry]): Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties @@ -407,7 +407,7 @@ class PigJob(proto.Message): Optional. HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -452,7 +452,7 @@ class SparkRJob(proto.Message): extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. - properties (Sequence[~.gcd_jobs.SparkRJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1.types.SparkRJob.PropertiesEntry]): Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc @@ -460,7 +460,7 @@ class SparkRJob(proto.Message): set in /etc/spark/conf/spark-defaults.conf and classes in user code. - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -489,7 +489,7 @@ class PrestoJob(proto.Message): query_file_uri (str): The HCFS URI of the script that contains SQL queries. - query_list (~.gcd_jobs.QueryList): + query_list (google.cloud.dataproc_v1.types.QueryList): A list of queries. continue_on_failure (bool): Optional. Whether to continue executing queries if a query @@ -502,12 +502,12 @@ class PrestoJob(proto.Message): client_tags (Sequence[str]): Optional. Presto client tags to attach to this query - properties (Sequence[~.gcd_jobs.PrestoJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1.types.PrestoJob.PropertiesEntry]): Optional. A mapping of property names to values. Used to set Presto `session properties `__ Equivalent to using the --session flag in the Presto CLI - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -550,17 +550,17 @@ class JobStatus(proto.Message): r"""Dataproc job status. Attributes: - state (~.gcd_jobs.JobStatus.State): + state (google.cloud.dataproc_v1.types.JobStatus.State): Output only. A state message specifying the overall job state. details (str): Optional. Output only. Job state details, such as an error description if the state is ERROR. - state_start_time (~.timestamp.Timestamp): + state_start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when this state was entered. - substate (~.gcd_jobs.JobStatus.Substate): + substate (google.cloud.dataproc_v1.types.JobStatus.Substate): Output only. Additional state information, which includes status reported by the agent. """ @@ -632,7 +632,7 @@ class YarnApplication(proto.Message): Attributes: name (str): Required. The application name. - state (~.gcd_jobs.YarnApplication.State): + state (google.cloud.dataproc_v1.types.YarnApplication.State): Required. The application state. progress (float): Required. The numerical progress of the @@ -673,37 +673,37 @@ class Job(proto.Message): r"""A Dataproc job resource. Attributes: - reference (~.gcd_jobs.JobReference): + reference (google.cloud.dataproc_v1.types.JobReference): Optional. 
The fully qualified reference to the job, which can be used to obtain the equivalent REST path of the job resource. If this property is not specified when a job is created, the server generates a job_id. - placement (~.gcd_jobs.JobPlacement): + placement (google.cloud.dataproc_v1.types.JobPlacement): Required. Job information, including how, when, and where to run the job. - hadoop_job (~.gcd_jobs.HadoopJob): + hadoop_job (google.cloud.dataproc_v1.types.HadoopJob): Optional. Job is a Hadoop job. - spark_job (~.gcd_jobs.SparkJob): + spark_job (google.cloud.dataproc_v1.types.SparkJob): Optional. Job is a Spark job. - pyspark_job (~.gcd_jobs.PySparkJob): + pyspark_job (google.cloud.dataproc_v1.types.PySparkJob): Optional. Job is a PySpark job. - hive_job (~.gcd_jobs.HiveJob): + hive_job (google.cloud.dataproc_v1.types.HiveJob): Optional. Job is a Hive job. - pig_job (~.gcd_jobs.PigJob): + pig_job (google.cloud.dataproc_v1.types.PigJob): Optional. Job is a Pig job. - spark_r_job (~.gcd_jobs.SparkRJob): + spark_r_job (google.cloud.dataproc_v1.types.SparkRJob): Optional. Job is a SparkR job. - spark_sql_job (~.gcd_jobs.SparkSqlJob): + spark_sql_job (google.cloud.dataproc_v1.types.SparkSqlJob): Optional. Job is a SparkSql job. - presto_job (~.gcd_jobs.PrestoJob): + presto_job (google.cloud.dataproc_v1.types.PrestoJob): Optional. Job is a Presto job. - status (~.gcd_jobs.JobStatus): + status (google.cloud.dataproc_v1.types.JobStatus): Output only. The job status. Additional application-specific status information may be contained in the type_job and yarn_applications fields. - status_history (Sequence[~.gcd_jobs.JobStatus]): + status_history (Sequence[google.cloud.dataproc_v1.types.JobStatus]): Output only. The previous job status. - yarn_applications (Sequence[~.gcd_jobs.YarnApplication]): + yarn_applications (Sequence[google.cloud.dataproc_v1.types.YarnApplication]): Output only. The collection of YARN applications spun up by this job. @@ -717,7 +717,7 @@ class Job(proto.Message): control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as ``driver_output_uri``. - labels (Sequence[~.gcd_jobs.Job.LabelsEntry]): + labels (Sequence[google.cloud.dataproc_v1.types.Job.LabelsEntry]): Optional. The labels to associate with this job. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. @@ -725,7 +725,7 @@ class Job(proto.Message): 1 to 63 characters, and must conform to `RFC 1035 `__. No more than 32 labels can be associated with a job. - scheduling (~.gcd_jobs.JobScheduling): + scheduling (google.cloud.dataproc_v1.types.JobScheduling): Optional. Job scheduling configuration. job_uuid (str): Output only. A UUID that uniquely identifies a job within @@ -823,7 +823,7 @@ class SubmitJobRequest(proto.Message): region (str): Required. The Dataproc region in which to handle the request. - job (~.gcd_jobs.Job): + job (google.cloud.dataproc_v1.types.Job): Required. The job resource. request_id (str): Optional. A unique id used to identify the request. If the @@ -856,11 +856,11 @@ class JobMetadata(proto.Message): Attributes: job_id (str): Output only. The job id. - status (~.gcd_jobs.JobStatus): + status (google.cloud.dataproc_v1.types.JobStatus): Output only. Most recent job status. operation_type (str): Output only. Operation type. - start_time (~.timestamp.Timestamp): + start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Job submission time. 
""" @@ -916,7 +916,7 @@ class ListJobsRequest(proto.Message): Optional. If set, the returned jobs list includes only jobs that were submitted to the named cluster. - job_state_matcher (~.gcd_jobs.ListJobsRequest.JobStateMatcher): + job_state_matcher (google.cloud.dataproc_v1.types.ListJobsRequest.JobStateMatcher): Optional. Specifies enumerated categories of jobs to list. (default = match ALL jobs). @@ -974,9 +974,9 @@ class UpdateJobRequest(proto.Message): handle the request. job_id (str): Required. The job ID. - job (~.gcd_jobs.Job): + job (google.cloud.dataproc_v1.types.Job): Required. The changes to the job. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Specifies the path, relative to Job, of the field to update. For example, to update the labels of a Job the update_mask parameter would be specified as labels, and the @@ -999,7 +999,7 @@ class ListJobsResponse(proto.Message): r"""A list of jobs in a project. Attributes: - jobs (Sequence[~.gcd_jobs.Job]): + jobs (Sequence[google.cloud.dataproc_v1.types.Job]): Output only. Jobs list. next_page_token (str): Optional. This token is included in the response if there diff --git a/google/cloud/dataproc_v1/types/operations.py b/google/cloud/dataproc_v1/types/operations.py index 042e8c77..4584b2ab 100644 --- a/google/cloud/dataproc_v1/types/operations.py +++ b/google/cloud/dataproc_v1/types/operations.py @@ -31,7 +31,7 @@ class ClusterOperationStatus(proto.Message): r"""The status of the operation. Attributes: - state (~.operations.ClusterOperationStatus.State): + state (google.cloud.dataproc_v1.types.ClusterOperationStatus.State): Output only. A message containing the operation state. inner_state (str): @@ -40,7 +40,7 @@ class ClusterOperationStatus(proto.Message): details (str): Output only. A message containing any operation metadata details. - state_start_time (~.timestamp.Timestamp): + state_start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time this state was entered. """ @@ -71,15 +71,15 @@ class ClusterOperationMetadata(proto.Message): operation. cluster_uuid (str): Output only. Cluster UUID for the operation. - status (~.operations.ClusterOperationStatus): + status (google.cloud.dataproc_v1.types.ClusterOperationStatus): Output only. Current operation status. - status_history (Sequence[~.operations.ClusterOperationStatus]): + status_history (Sequence[google.cloud.dataproc_v1.types.ClusterOperationStatus]): Output only. The previous operation status. operation_type (str): Output only. The operation type. description (str): Output only. Short description of operation. - labels (Sequence[~.operations.ClusterOperationMetadata.LabelsEntry]): + labels (Sequence[google.cloud.dataproc_v1.types.ClusterOperationMetadata.LabelsEntry]): Output only. Labels associated with the operation warnings (Sequence[str]): diff --git a/google/cloud/dataproc_v1/types/workflow_templates.py b/google/cloud/dataproc_v1/types/workflow_templates.py index 50e8a469..027fbc74 100644 --- a/google/cloud/dataproc_v1/types/workflow_templates.py +++ b/google/cloud/dataproc_v1/types/workflow_templates.py @@ -81,12 +81,12 @@ class WorkflowTemplate(proto.Message): ``version`` field filled in with the current server version. The user updates other fields in the template, then returns it as part of the ``UpdateWorkflowTemplate`` request. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time template was created. 
- update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time template was last updated. - labels (Sequence[~.workflow_templates.WorkflowTemplate.LabelsEntry]): + labels (Sequence[google.cloud.dataproc_v1.types.WorkflowTemplate.LabelsEntry]): Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. @@ -100,13 +100,13 @@ class WorkflowTemplate(proto.Message): 1035 `__. No more than 32 labels can be associated with a template. - placement (~.workflow_templates.WorkflowTemplatePlacement): + placement (google.cloud.dataproc_v1.types.WorkflowTemplatePlacement): Required. WorkflowTemplate scheduling information. - jobs (Sequence[~.workflow_templates.OrderedJob]): + jobs (Sequence[google.cloud.dataproc_v1.types.OrderedJob]): Required. The Directed Acyclic Graph of Jobs to submit. - parameters (Sequence[~.workflow_templates.TemplateParameter]): + parameters (Sequence[google.cloud.dataproc_v1.types.TemplateParameter]): Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is @@ -142,9 +142,9 @@ class WorkflowTemplatePlacement(proto.Message): Either ``managed_cluster`` or ``cluster_selector`` is required. Attributes: - managed_cluster (~.workflow_templates.ManagedCluster): + managed_cluster (google.cloud.dataproc_v1.types.ManagedCluster): A cluster that is managed by the workflow. - cluster_selector (~.workflow_templates.ClusterSelector): + cluster_selector (google.cloud.dataproc_v1.types.ClusterSelector): Optional. A selector that chooses target cluster for jobs based on metadata. @@ -174,9 +174,9 @@ class ManagedCluster(proto.Message): begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters. - config (~.clusters.ClusterConfig): + config (google.cloud.dataproc_v1.types.ClusterConfig): Required. The cluster configuration. - labels (Sequence[~.workflow_templates.ManagedCluster.LabelsEntry]): + labels (Sequence[google.cloud.dataproc_v1.types.ManagedCluster.LabelsEntry]): Optional. The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and @@ -209,7 +209,7 @@ class ClusterSelector(proto.Message): selection of the cluster. If unspecified, the zone of the first cluster matching the selector is used. - cluster_labels (Sequence[~.workflow_templates.ClusterSelector.ClusterLabelsEntry]): + cluster_labels (Sequence[google.cloud.dataproc_v1.types.ClusterSelector.ClusterLabelsEntry]): Required. The cluster labels. Cluster must have all labels to match. """ @@ -236,23 +236,23 @@ class OrderedJob(proto.Message): underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters. - hadoop_job (~.gcd_jobs.HadoopJob): + hadoop_job (google.cloud.dataproc_v1.types.HadoopJob): Optional. Job is a Hadoop job. - spark_job (~.gcd_jobs.SparkJob): + spark_job (google.cloud.dataproc_v1.types.SparkJob): Optional. Job is a Spark job. - pyspark_job (~.gcd_jobs.PySparkJob): + pyspark_job (google.cloud.dataproc_v1.types.PySparkJob): Optional. Job is a PySpark job. - hive_job (~.gcd_jobs.HiveJob): + hive_job (google.cloud.dataproc_v1.types.HiveJob): Optional. Job is a Hive job. - pig_job (~.gcd_jobs.PigJob): + pig_job (google.cloud.dataproc_v1.types.PigJob): Optional. Job is a Pig job. 
- spark_r_job (~.gcd_jobs.SparkRJob): + spark_r_job (google.cloud.dataproc_v1.types.SparkRJob): Optional. Job is a SparkR job. - spark_sql_job (~.gcd_jobs.SparkSqlJob): + spark_sql_job (google.cloud.dataproc_v1.types.SparkSqlJob): Optional. Job is a SparkSql job. - presto_job (~.gcd_jobs.PrestoJob): + presto_job (google.cloud.dataproc_v1.types.PrestoJob): Optional. Job is a Presto job. - labels (Sequence[~.workflow_templates.OrderedJob.LabelsEntry]): + labels (Sequence[google.cloud.dataproc_v1.types.OrderedJob.LabelsEntry]): Optional. The labels to associate with this job. Label keys must be between 1 and 63 characters long, and @@ -264,7 +264,7 @@ class OrderedJob(proto.Message): [\p{Ll}\p{Lo}\p{N}_-]{0,63} No more than 32 labels can be associated with a given job. - scheduling (~.gcd_jobs.JobScheduling): + scheduling (google.cloud.dataproc_v1.types.JobScheduling): Optional. Job scheduling configuration. prerequisite_step_ids (Sequence[str]): Optional. The optional list of prerequisite job step_ids. If @@ -387,7 +387,7 @@ class TemplateParameter(proto.Message): description (str): Optional. Brief description of the parameter. Must not exceed 1024 characters. - validation (~.workflow_templates.ParameterValidation): + validation (google.cloud.dataproc_v1.types.ParameterValidation): Optional. Validation rules to be applied to this parameter's value. """ @@ -405,9 +405,9 @@ class ParameterValidation(proto.Message): r"""Configuration for parameter validation. Attributes: - regex (~.workflow_templates.RegexValidation): + regex (google.cloud.dataproc_v1.types.RegexValidation): Validation based on regular expressions. - values (~.workflow_templates.ValueValidation): + values (google.cloud.dataproc_v1.types.ValueValidation): Validation based on a list of allowed values. """ @@ -465,24 +465,24 @@ class WorkflowMetadata(proto.Message): version (int): Output only. The version of template at the time of workflow instantiation. - create_cluster (~.workflow_templates.ClusterOperation): + create_cluster (google.cloud.dataproc_v1.types.ClusterOperation): Output only. The create cluster operation metadata. - graph (~.workflow_templates.WorkflowGraph): + graph (google.cloud.dataproc_v1.types.WorkflowGraph): Output only. The workflow graph. - delete_cluster (~.workflow_templates.ClusterOperation): + delete_cluster (google.cloud.dataproc_v1.types.ClusterOperation): Output only. The delete cluster operation metadata. - state (~.workflow_templates.WorkflowMetadata.State): + state (google.cloud.dataproc_v1.types.WorkflowMetadata.State): Output only. The workflow state. cluster_name (str): Output only. The name of the target cluster. - parameters (Sequence[~.workflow_templates.WorkflowMetadata.ParametersEntry]): + parameters (Sequence[google.cloud.dataproc_v1.types.WorkflowMetadata.ParametersEntry]): Map from parameter names to values that were used for those parameters. - start_time (~.timestamp.Timestamp): + start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Workflow start time. - end_time (~.timestamp.Timestamp): + end_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Workflow end time. cluster_uuid (str): Output only. The UUID of target cluster. @@ -541,7 +541,7 @@ class WorkflowGraph(proto.Message): r"""The workflow graph. Attributes: - nodes (Sequence[~.workflow_templates.WorkflowNode]): + nodes (Sequence[google.cloud.dataproc_v1.types.WorkflowNode]): Output only. The workflow nodes. """ @@ -559,7 +559,7 @@ class WorkflowNode(proto.Message): job_id (str): Output only. 
The job id; populated after the node enters RUNNING state. - state (~.workflow_templates.WorkflowNode.NodeState): + state (google.cloud.dataproc_v1.types.WorkflowNode.NodeState): Output only. The node state. error (str): Output only. The error detail. @@ -601,7 +601,7 @@ class CreateWorkflowTemplateRequest(proto.Message): - For ``projects.locations.workflowTemplates.create``, the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` - template (~.workflow_templates.WorkflowTemplate): + template (google.cloud.dataproc_v1.types.WorkflowTemplate): Required. The Dataproc workflow template to create. """ @@ -675,7 +675,7 @@ class InstantiateWorkflowTemplateRequest(proto.Message): The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). The maximum length is 40 characters. - parameters (Sequence[~.workflow_templates.InstantiateWorkflowTemplateRequest.ParametersEntry]): + parameters (Sequence[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest.ParametersEntry]): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 100 characters. @@ -708,7 +708,7 @@ class InstantiateInlineWorkflowTemplateRequest(proto.Message): ``projects.locations.workflowTemplates.instantiateinline``, the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` - template (~.workflow_templates.WorkflowTemplate): + template (google.cloud.dataproc_v1.types.WorkflowTemplate): Required. The workflow template to instantiate. request_id (str): @@ -735,7 +735,7 @@ class UpdateWorkflowTemplateRequest(proto.Message): r"""A request to update a workflow template. Attributes: - template (~.workflow_templates.WorkflowTemplate): + template (google.cloud.dataproc_v1.types.WorkflowTemplate): Required. The updated workflow template. The ``template.version`` field must match the current @@ -782,7 +782,7 @@ class ListWorkflowTemplatesResponse(proto.Message): project. Attributes: - templates (Sequence[~.workflow_templates.WorkflowTemplate]): + templates (Sequence[google.cloud.dataproc_v1.types.WorkflowTemplate]): Output only. WorkflowTemplates list. next_page_token (str): Output only. 
This token is included in the response if there diff --git a/google/cloud/dataproc_v1beta2/__init__.py b/google/cloud/dataproc_v1beta2/__init__.py index 7af81c6b..1a0d3c1a 100644 --- a/google/cloud/dataproc_v1beta2/__init__.py +++ b/google/cloud/dataproc_v1beta2/__init__.py @@ -110,6 +110,7 @@ "AcceleratorConfig", "AutoscalingConfig", "AutoscalingPolicy", + "AutoscalingPolicyServiceClient", "BasicAutoscalingAlgorithm", "BasicYarnAutoscalingConfig", "CancelJobRequest", @@ -193,7 +194,6 @@ "WorkflowNode", "WorkflowTemplate", "WorkflowTemplatePlacement", - "WorkflowTemplateServiceClient", "YarnApplication", - "AutoscalingPolicyServiceClient", + "WorkflowTemplateServiceClient", ) diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/async_client.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/async_client.py index d3f3c9c9..5b254961 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/async_client.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/async_client.py @@ -86,7 +86,36 @@ class AutoscalingPolicyServiceAsyncClient: AutoscalingPolicyServiceClient.parse_common_location_path ) - from_service_account_file = AutoscalingPolicyServiceClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalingPolicyServiceAsyncClient: The constructed client. + """ + return AutoscalingPolicyServiceClient.from_service_account_info.__func__(AutoscalingPolicyServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalingPolicyServiceAsyncClient: The constructed client. + """ + return AutoscalingPolicyServiceClient.from_service_account_file.__func__(AutoscalingPolicyServiceAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -164,7 +193,7 @@ async def create_autoscaling_policy( r"""Creates new autoscaling policy. Args: - request (:class:`~.autoscaling_policies.CreateAutoscalingPolicyRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.CreateAutoscalingPolicyRequest`): The request object. A request to create an autoscaling policy. parent (:class:`str`): @@ -180,12 +209,14 @@ async def create_autoscaling_policy( ``projects.locations.autoscalingPolicies.create``, the resource name has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - policy (:class:`~.autoscaling_policies.AutoscalingPolicy`): + policy (:class:`google.cloud.dataproc_v1beta2.types.AutoscalingPolicy`): Required. The autoscaling policy to create. + This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
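The ``from_service_account_info`` classmethods introduced in this diff mirror the existing ``from_service_account_file`` helpers but accept an already-parsed key dict rather than a path. A minimal usage sketch, assuming a key file at a hypothetical location::

    import json

    from google.cloud import dataproc_v1beta2

    # Build a client from a key file on disk (pre-existing helper).
    client = dataproc_v1beta2.AutoscalingPolicyServiceClient.from_service_account_file(
        "service-account.json"  # hypothetical path
    )

    # Build a client from a dict loaded elsewhere (the new helper).
    with open("service-account.json") as fh:  # hypothetical path
        info = json.load(fh)
    client = dataproc_v1beta2.AutoscalingPolicyServiceClient.from_service_account_info(info)

Both helpers simply construct credentials and forward them to the constructor, so any other constructor arguments pass through ``*args``/``**kwargs`` unchanged.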
@@ -197,7 +228,7 @@ async def create_autoscaling_policy( sent along with the request as metadata. Returns: - ~.autoscaling_policies.AutoscalingPolicy: + google.cloud.dataproc_v1beta2.types.AutoscalingPolicy: Describes an autoscaling policy for Dataproc cluster autoscaler. @@ -257,12 +288,13 @@ async def update_autoscaling_policy( replacements. Args: - request (:class:`~.autoscaling_policies.UpdateAutoscalingPolicyRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.UpdateAutoscalingPolicyRequest`): The request object. A request to update an autoscaling policy. - policy (:class:`~.autoscaling_policies.AutoscalingPolicy`): + policy (:class:`google.cloud.dataproc_v1beta2.types.AutoscalingPolicy`): Required. The updated autoscaling policy. + This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -274,7 +306,7 @@ async def update_autoscaling_policy( sent along with the request as metadata. Returns: - ~.autoscaling_policies.AutoscalingPolicy: + google.cloud.dataproc_v1beta2.types.AutoscalingPolicy: Describes an autoscaling policy for Dataproc cluster autoscaler. @@ -308,6 +340,7 @@ async def update_autoscaling_policy( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -339,7 +372,7 @@ async def get_autoscaling_policy( r"""Retrieves autoscaling policy. Args: - request (:class:`~.autoscaling_policies.GetAutoscalingPolicyRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.GetAutoscalingPolicyRequest`): The request object. A request to fetch an autoscaling policy. name (:class:`str`): @@ -355,6 +388,7 @@ async def get_autoscaling_policy( the resource name of the policy has the following format: ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -366,7 +400,7 @@ async def get_autoscaling_policy( sent along with the request as metadata. Returns: - ~.autoscaling_policies.AutoscalingPolicy: + google.cloud.dataproc_v1beta2.types.AutoscalingPolicy: Describes an autoscaling policy for Dataproc cluster autoscaler. @@ -400,6 +434,7 @@ async def get_autoscaling_policy( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -429,7 +464,7 @@ async def list_autoscaling_policies( r"""Lists autoscaling policies in the project. Args: - request (:class:`~.autoscaling_policies.ListAutoscalingPoliciesRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.ListAutoscalingPoliciesRequest`): The request object. A request to list autoscaling policies in a project. parent (:class:`str`): @@ -445,6 +480,7 @@ async def list_autoscaling_policies( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -456,7 +492,7 @@ async def list_autoscaling_policies( sent along with the request as metadata. Returns: - ~.pagers.ListAutoscalingPoliciesAsyncPager: + google.cloud.dataproc_v1beta2.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesAsyncPager: A response to a request to list autoscaling policies in a project. 
Iterating over this object will yield @@ -493,6 +529,7 @@ async def list_autoscaling_policies( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -530,7 +567,7 @@ async def delete_autoscaling_policy( more clusters. Args: - request (:class:`~.autoscaling_policies.DeleteAutoscalingPolicyRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.DeleteAutoscalingPolicyRequest`): The request object. A request to delete an autoscaling policy. Autoscaling policies in use by one or more clusters will @@ -550,6 +587,7 @@ async def delete_autoscaling_policy( the resource name of the policy has the following format: ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/client.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/client.py index bc80019f..d7ac0e31 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/client.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/client.py @@ -114,6 +114,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalingPolicyServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -126,7 +142,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + AutoscalingPolicyServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -236,10 +252,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.AutoscalingPolicyServiceTransport]): The + transport (Union[str, AutoscalingPolicyServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -275,21 +291,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -332,7 +344,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -350,10 +362,10 @@ def create_autoscaling_policy( r"""Creates new autoscaling policy. Args: - request (:class:`~.autoscaling_policies.CreateAutoscalingPolicyRequest`): + request (google.cloud.dataproc_v1beta2.types.CreateAutoscalingPolicyRequest): The request object. A request to create an autoscaling policy. - parent (:class:`str`): + parent (str): Required. The "resource name" of the region or location, as described in https://cloud.google.com/apis/design/resource_names. @@ -366,12 +378,14 @@ def create_autoscaling_policy( ``projects.locations.autoscalingPolicies.create``, the resource name has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - policy (:class:`~.autoscaling_policies.AutoscalingPolicy`): + policy (google.cloud.dataproc_v1beta2.types.AutoscalingPolicy): Required. The autoscaling policy to create. + This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -383,7 +397,7 @@ def create_autoscaling_policy( sent along with the request as metadata. Returns: - ~.autoscaling_policies.AutoscalingPolicy: + google.cloud.dataproc_v1beta2.types.AutoscalingPolicy: Describes an autoscaling policy for Dataproc cluster autoscaler. @@ -446,12 +460,13 @@ def update_autoscaling_policy( replacements. Args: - request (:class:`~.autoscaling_policies.UpdateAutoscalingPolicyRequest`): + request (google.cloud.dataproc_v1beta2.types.UpdateAutoscalingPolicyRequest): The request object. A request to update an autoscaling policy. - policy (:class:`~.autoscaling_policies.AutoscalingPolicy`): + policy (google.cloud.dataproc_v1beta2.types.AutoscalingPolicy): Required. The updated autoscaling policy. + This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -463,7 +478,7 @@ def update_autoscaling_policy( sent along with the request as metadata. Returns: - ~.autoscaling_policies.AutoscalingPolicy: + google.cloud.dataproc_v1beta2.types.AutoscalingPolicy: Describes an autoscaling policy for Dataproc cluster autoscaler. @@ -523,10 +538,10 @@ def get_autoscaling_policy( r"""Retrieves autoscaling policy. 
Args: - request (:class:`~.autoscaling_policies.GetAutoscalingPolicyRequest`): + request (google.cloud.dataproc_v1beta2.types.GetAutoscalingPolicyRequest): The request object. A request to fetch an autoscaling policy. - name (:class:`str`): + name (str): Required. The "resource name" of the autoscaling policy, as described in https://cloud.google.com/apis/design/resource_names. @@ -539,6 +554,7 @@ def get_autoscaling_policy( the resource name of the policy has the following format: ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -550,7 +566,7 @@ def get_autoscaling_policy( sent along with the request as metadata. Returns: - ~.autoscaling_policies.AutoscalingPolicy: + google.cloud.dataproc_v1beta2.types.AutoscalingPolicy: Describes an autoscaling policy for Dataproc cluster autoscaler. @@ -606,10 +622,10 @@ def list_autoscaling_policies( r"""Lists autoscaling policies in the project. Args: - request (:class:`~.autoscaling_policies.ListAutoscalingPoliciesRequest`): + request (google.cloud.dataproc_v1beta2.types.ListAutoscalingPoliciesRequest): The request object. A request to list autoscaling policies in a project. - parent (:class:`str`): + parent (str): Required. The "resource name" of the region or location, as described in https://cloud.google.com/apis/design/resource_names. @@ -622,6 +638,7 @@ def list_autoscaling_policies( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -633,7 +650,7 @@ def list_autoscaling_policies( sent along with the request as metadata. Returns: - ~.pagers.ListAutoscalingPoliciesPager: + google.cloud.dataproc_v1beta2.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesPager: A response to a request to list autoscaling policies in a project. Iterating over this object will yield @@ -702,12 +719,12 @@ def delete_autoscaling_policy( more clusters. Args: - request (:class:`~.autoscaling_policies.DeleteAutoscalingPolicyRequest`): + request (google.cloud.dataproc_v1beta2.types.DeleteAutoscalingPolicyRequest): The request object. A request to delete an autoscaling policy. Autoscaling policies in use by one or more clusters will not be deleted. - name (:class:`str`): + name (str): Required. The "resource name" of the autoscaling policy, as described in https://cloud.google.com/apis/design/resource_names. @@ -722,6 +739,7 @@ def delete_autoscaling_policy( the resource name of the policy has the following format: ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/pagers.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/pagers.py index 4a9a6942..b74bf96a 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/pagers.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. 
# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.dataproc_v1beta2.types import autoscaling_policies @@ -24,7 +33,7 @@ class ListAutoscalingPoliciesPager: """A pager for iterating through ``list_autoscaling_policies`` requests. This class thinly wraps an initial - :class:`~.autoscaling_policies.ListAutoscalingPoliciesResponse` object, and + :class:`google.cloud.dataproc_v1beta2.types.ListAutoscalingPoliciesResponse` object, and provides an ``__iter__`` method to iterate through its ``policies`` field. @@ -33,7 +42,7 @@ class ListAutoscalingPoliciesPager: through the ``policies`` field on the corresponding responses. - All the usual :class:`~.autoscaling_policies.ListAutoscalingPoliciesResponse` + All the usual :class:`google.cloud.dataproc_v1beta2.types.ListAutoscalingPoliciesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.autoscaling_policies.ListAutoscalingPoliciesRequest`): + request (google.cloud.dataproc_v1beta2.types.ListAutoscalingPoliciesRequest): The initial request object. - response (:class:`~.autoscaling_policies.ListAutoscalingPoliciesResponse`): + response (google.cloud.dataproc_v1beta2.types.ListAutoscalingPoliciesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListAutoscalingPoliciesAsyncPager: """A pager for iterating through ``list_autoscaling_policies`` requests. This class thinly wraps an initial - :class:`~.autoscaling_policies.ListAutoscalingPoliciesResponse` object, and + :class:`google.cloud.dataproc_v1beta2.types.ListAutoscalingPoliciesResponse` object, and provides an ``__aiter__`` method to iterate through its ``policies`` field. @@ -95,7 +104,7 @@ class ListAutoscalingPoliciesAsyncPager: through the ``policies`` field on the corresponding responses. - All the usual :class:`~.autoscaling_policies.ListAutoscalingPoliciesResponse` + All the usual :class:`google.cloud.dataproc_v1beta2.types.ListAutoscalingPoliciesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -115,9 +124,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.autoscaling_policies.ListAutoscalingPoliciesRequest`): + request (google.cloud.dataproc_v1beta2.types.ListAutoscalingPoliciesRequest): The initial request object. - response (:class:`~.autoscaling_policies.ListAutoscalingPoliciesResponse`): + response (google.cloud.dataproc_v1beta2.types.ListAutoscalingPoliciesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
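The pager edits above are docstring and typing changes only; runtime behavior is unchanged. For context, a minimal sketch of how ``ListAutoscalingPoliciesPager`` is consumed (project and location below are placeholders)::

    from google.cloud import dataproc_v1beta2

    client = dataproc_v1beta2.AutoscalingPolicyServiceClient()
    parent = "projects/my-project/locations/global"  # hypothetical resource name

    # The pager requests follow-up pages transparently while iterating.
    for policy in client.list_autoscaling_policies(parent=parent):
        print(policy.id)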
diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/base.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/base.py index bc039c5e..64375cee 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/base.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/base.py @@ -68,10 +68,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -79,6 +79,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -88,20 +91,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -119,6 +119,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -132,6 +133,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -145,6 +147,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc.py index 01896af1..28018b7c 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc.py @@ -59,6 +59,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -89,6 +90,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -103,72 +108,60 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -176,17 +169,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -200,7 +184,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. 
If diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc_asyncio.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc_asyncio.py index cf4811e1..53d446d8 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc_asyncio.py @@ -63,7 +63,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -103,6 +103,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -134,12 +135,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -148,72 +153,60 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -221,17 +214,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/async_client.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/async_client.py index 9ed3eff3..a9f5f7e7 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/async_client.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/async_client.py @@ -83,7 +83,36 @@ class ClusterControllerAsyncClient: ClusterControllerClient.parse_common_location_path ) - from_service_account_file = ClusterControllerClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterControllerAsyncClient: The constructed client. + """ + return ClusterControllerClient.from_service_account_info.__func__(ClusterControllerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterControllerAsyncClient: The constructed client. + """ + return ClusterControllerClient.from_service_account_file.__func__(ClusterControllerAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -164,22 +193,24 @@ async def create_cluster( `ClusterOperationMetadata `__. Args: - request (:class:`~.clusters.CreateClusterRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.CreateClusterRequest`): The request object. A request to create a cluster. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster (:class:`~.clusters.Cluster`): + cluster (:class:`google.cloud.dataproc_v1beta2.types.Cluster`): Required. The cluster to create. This corresponds to the ``cluster`` field on the ``request`` instance; if ``request`` is provided, this @@ -192,13 +223,11 @@ async def create_cluster( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.clusters.Cluster``: Describes the identifying - information, config, and status of a cluster of Compute - Engine instances. + The result type for the operation will be :class:`google.cloud.dataproc_v1beta2.types.Cluster` Describes the identifying information, config, and status of + a cluster of Compute Engine instances. """ # Create or coerce a protobuf request object. @@ -232,6 +261,7 @@ async def create_cluster( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=DEFAULT_CLIENT_INFO, @@ -270,17 +300,19 @@ async def update_cluster( `ClusterOperationMetadata `__. Args: - request (:class:`~.clusters.UpdateClusterRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.UpdateClusterRequest`): The request object. A request to update a cluster. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -289,12 +321,12 @@ async def update_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster (:class:`~.clusters.Cluster`): + cluster (:class:`google.cloud.dataproc_v1beta2.types.Cluster`): Required. The changes to the cluster. This corresponds to the ``cluster`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Specifies the path, relative to ``Cluster``, of the field to update. For example, to change the number of workers in a cluster to 5, the ``update_mask`` @@ -365,6 +397,7 @@ async def update_cluster( autoscaling policies + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -376,13 +409,11 @@ async def update_cluster( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.clusters.Cluster``: Describes the identifying - information, config, and status of a cluster of Compute - Engine instances. + The result type for the operation will be :class:`google.cloud.dataproc_v1beta2.types.Cluster` Describes the identifying information, config, and status of + a cluster of Compute Engine instances. """ # Create or coerce a protobuf request object. @@ -422,6 +453,7 @@ async def update_cluster( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=DEFAULT_CLIENT_INFO, @@ -458,18 +490,20 @@ async def delete_cluster( `ClusterOperationMetadata `__. Args: - request (:class:`~.clusters.DeleteClusterRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.DeleteClusterRequest`): The request object. A request to delete a cluster. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -486,24 +520,22 @@ async def delete_cluster( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -537,6 +569,7 @@ async def delete_cluster( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=DEFAULT_CLIENT_INFO, @@ -571,19 +604,21 @@ async def get_cluster( project. Args: - request (:class:`~.clusters.GetClusterRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.GetClusterRequest`): The request object. Request to get the resource representation for a cluster in a project. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -600,7 +635,7 @@ async def get_cluster( sent along with the request as metadata. Returns: - ~.clusters.Cluster: + google.cloud.dataproc_v1beta2.types.Cluster: Describes the identifying information, config, and status of a cluster of Compute Engine instances. @@ -641,6 +676,7 @@ async def get_cluster( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=300.0, ), default_timeout=300.0, client_info=DEFAULT_CLIENT_INFO, @@ -667,19 +703,21 @@ async def list_clusters( alphabetically. Args: - request (:class:`~.clusters.ListClustersRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.ListClustersRequest`): The request object. A request to list the clusters in a project. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -707,6 +745,7 @@ async def list_clusters( status.state = ACTIVE AND clusterName = mycluster AND labels.env = staging AND labels.starred = \* + This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -718,7 +757,7 @@ async def list_clusters( sent along with the request as metadata. Returns: - ~.pagers.ListClustersAsyncPager: + google.cloud.dataproc_v1beta2.services.cluster_controller.pagers.ListClustersAsyncPager: The list of all clusters in a project. Iterating over this object will yield @@ -761,6 +800,7 @@ async def list_clusters( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=300.0, ), default_timeout=300.0, client_info=DEFAULT_CLIENT_INFO, @@ -798,19 +838,21 @@ async def diagnose_cluster( contains [Empty][google.protobuf.Empty]. Args: - request (:class:`~.clusters.DiagnoseClusterRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.DiagnoseClusterRequest`): The request object. A request to collect cluster diagnostic information. 
project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -827,24 +869,22 @@ async def diagnose_cluster( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -878,6 +918,7 @@ async def diagnose_cluster( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/client.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/client.py index 4be2492d..167dfd57 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/client.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/client.py @@ -119,6 +119,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -131,7 +147,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + ClusterControllerClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -239,10 +255,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, ~.ClusterControllerTransport]): The + transport (Union[str, ClusterControllerTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -278,21 +294,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -335,7 +347,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -357,22 +369,24 @@ def create_cluster( `ClusterOperationMetadata `__. Args: - request (:class:`~.clusters.CreateClusterRequest`): + request (google.cloud.dataproc_v1beta2.types.CreateClusterRequest): The request object. A request to create a cluster. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster (:class:`~.clusters.Cluster`): + cluster (google.cloud.dataproc_v1beta2.types.Cluster): Required. The cluster to create. This corresponds to the ``cluster`` field on the ``request`` instance; if ``request`` is provided, this @@ -385,13 +399,11 @@ def create_cluster( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.clusters.Cluster``: Describes the identifying - information, config, and status of a cluster of Compute - Engine instances. + The result type for the operation will be :class:`google.cloud.dataproc_v1beta2.types.Cluster` Describes the identifying information, config, and status of + a cluster of Compute Engine instances. """ # Create or coerce a protobuf request object. @@ -458,31 +470,33 @@ def update_cluster( `ClusterOperationMetadata `__. Args: - request (:class:`~.clusters.UpdateClusterRequest`): + request (google.cloud.dataproc_v1beta2.types.UpdateClusterRequest): The request object. 
A request to update a cluster. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster_name (:class:`str`): + cluster_name (str): Required. The cluster name. This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster (:class:`~.clusters.Cluster`): + cluster (google.cloud.dataproc_v1beta2.types.Cluster): Required. The changes to the cluster. This corresponds to the ``cluster`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Specifies the path, relative to ``Cluster``, of the field to update. For example, to change the number of workers in a cluster to 5, the ``update_mask`` @@ -553,6 +567,7 @@ def update_cluster( autoscaling policies + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -564,13 +579,11 @@ def update_cluster( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.clusters.Cluster``: Describes the identifying - information, config, and status of a cluster of Compute - Engine instances. + The result type for the operation will be :class:`google.cloud.dataproc_v1beta2.types.Cluster` Describes the identifying information, config, and status of + a cluster of Compute Engine instances. """ # Create or coerce a protobuf request object. @@ -641,22 +654,24 @@ def delete_cluster( `ClusterOperationMetadata `__. Args: - request (:class:`~.clusters.DeleteClusterRequest`): + request (google.cloud.dataproc_v1beta2.types.DeleteClusterRequest): The request object. A request to delete a cluster. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster_name (:class:`str`): + cluster_name (str): Required. The cluster name. This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -669,24 +684,22 @@ def delete_cluster( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. 
For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -749,23 +762,25 @@ def get_cluster( project. Args: - request (:class:`~.clusters.GetClusterRequest`): + request (google.cloud.dataproc_v1beta2.types.GetClusterRequest): The request object. Request to get the resource representation for a cluster in a project. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster_name (:class:`str`): + cluster_name (str): Required. The cluster name. This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -778,7 +793,7 @@ def get_cluster( sent along with the request as metadata. Returns: - ~.clusters.Cluster: + google.cloud.dataproc_v1beta2.types.Cluster: Describes the identifying information, config, and status of a cluster of Compute Engine instances. @@ -836,23 +851,25 @@ def list_clusters( alphabetically. Args: - request (:class:`~.clusters.ListClustersRequest`): + request (google.cloud.dataproc_v1beta2.types.ListClustersRequest): The request object. A request to list the clusters in a project. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - filter (:class:`str`): + filter (str): Optional. A filter constraining the clusters to list. Filters are case-sensitive and have the following syntax: @@ -876,6 +893,7 @@ def list_clusters( status.state = ACTIVE AND clusterName = mycluster AND labels.env = staging AND labels.starred = \* + This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -887,7 +905,7 @@ def list_clusters( sent along with the request as metadata. Returns: - ~.pagers.ListClustersPager: + google.cloud.dataproc_v1beta2.services.cluster_controller.pagers.ListClustersPager: The list of all clusters in a project. Iterating over this object will yield @@ -958,23 +976,25 @@ def diagnose_cluster( contains [Empty][google.protobuf.Empty]. Args: - request (:class:`~.clusters.DiagnoseClusterRequest`): + request (google.cloud.dataproc_v1beta2.types.DiagnoseClusterRequest): The request object. 
A request to collect cluster diagnostic information. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - cluster_name (:class:`str`): + cluster_name (str): Required. The cluster name. This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -987,24 +1007,22 @@ def diagnose_cluster( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/pagers.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/pagers.py index d7c6c416..84576e5f 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/pagers.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.dataproc_v1beta2.types import clusters @@ -24,7 +33,7 @@ class ListClustersPager: """A pager for iterating through ``list_clusters`` requests. This class thinly wraps an initial - :class:`~.clusters.ListClustersResponse` object, and + :class:`google.cloud.dataproc_v1beta2.types.ListClustersResponse` object, and provides an ``__iter__`` method to iterate through its ``clusters`` field. @@ -33,7 +42,7 @@ class ListClustersPager: through the ``clusters`` field on the corresponding responses. - All the usual :class:`~.clusters.ListClustersResponse` + All the usual :class:`google.cloud.dataproc_v1beta2.types.ListClustersResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.clusters.ListClustersRequest`): + request (google.cloud.dataproc_v1beta2.types.ListClustersRequest): The initial request object. 
- response (:class:`~.clusters.ListClustersResponse`): + response (google.cloud.dataproc_v1beta2.types.ListClustersResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListClustersAsyncPager: """A pager for iterating through ``list_clusters`` requests. This class thinly wraps an initial - :class:`~.clusters.ListClustersResponse` object, and + :class:`google.cloud.dataproc_v1beta2.types.ListClustersResponse` object, and provides an ``__aiter__`` method to iterate through its ``clusters`` field. @@ -95,7 +104,7 @@ class ListClustersAsyncPager: through the ``clusters`` field on the corresponding responses. - All the usual :class:`~.clusters.ListClustersResponse` + All the usual :class:`google.cloud.dataproc_v1beta2.types.ListClustersResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -113,9 +122,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.clusters.ListClustersRequest`): + request (google.cloud.dataproc_v1beta2.types.ListClustersRequest): The initial request object. - response (:class:`~.clusters.ListClustersResponse`): + response (google.cloud.dataproc_v1beta2.types.ListClustersResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/base.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/base.py index 5e0d3298..10250808 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/base.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/base.py @@ -69,10 +69,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -80,6 +80,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -89,20 +92,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. 
- self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -113,6 +113,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=client_info, @@ -124,6 +125,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=client_info, @@ -135,6 +137,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=client_info, @@ -150,6 +153,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=300.0, ), default_timeout=300.0, client_info=client_info, @@ -165,6 +169,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=300.0, ), default_timeout=300.0, client_info=client_info, @@ -176,6 +181,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=300.0, ), default_timeout=300.0, client_info=client_info, diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc.py index 2af74b62..cb2b0558 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc.py @@ -60,6 +60,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -90,6 +91,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -104,72 +109,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. 
credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -177,18 +171,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -202,7 +186,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. 
credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc_asyncio.py index 186cc414..80c4e84d 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc_asyncio.py @@ -64,7 +64,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -104,6 +104,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -135,12 +136,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -149,72 +154,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -222,18 +216,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/async_client.py b/google/cloud/dataproc_v1beta2/services/job_controller/async_client.py index 57234d85..38af5a95 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/async_client.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/async_client.py @@ -75,7 +75,36 @@ class JobControllerAsyncClient: JobControllerClient.parse_common_location_path ) - from_service_account_file = JobControllerClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobControllerAsyncClient: The constructed client. + """ + return JobControllerClient.from_service_account_info.__func__(JobControllerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobControllerAsyncClient: The constructed client. + """ + return JobControllerClient.from_service_account_file.__func__(JobControllerAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -153,22 +182,24 @@ async def submit_job( r"""Submits a job to a cluster. Args: - request (:class:`~.jobs.SubmitJobRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.SubmitJobRequest`): The request object. A request to submit a job. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job (:class:`~.jobs.Job`): + job (:class:`google.cloud.dataproc_v1beta2.types.Job`): Required. The job resource. This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this @@ -181,7 +212,7 @@ async def submit_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1beta2.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -215,6 +246,7 @@ async def submit_job( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -240,22 +272,24 @@ async def submit_job_as_operation( r"""Submits job to a cluster. Args: - request (:class:`~.jobs.SubmitJobRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.SubmitJobRequest`): The request object. A request to submit a job. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the job belongs to. 
+ This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job (:class:`~.jobs.Job`): + job (:class:`google.cloud.dataproc_v1beta2.types.Job`): Required. The job resource. This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this @@ -268,11 +302,12 @@ async def submit_job_as_operation( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.jobs.Job``: A Dataproc job resource. + :class:`google.cloud.dataproc_v1beta2.types.Job` A + Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -306,6 +341,7 @@ async def submit_job_as_operation( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -340,19 +376,21 @@ async def get_job( project. Args: - request (:class:`~.jobs.GetJobRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.GetJobRequest`): The request object. A request to get the resource representation for a job in a project. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -369,7 +407,7 @@ async def get_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1beta2.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -407,6 +445,7 @@ async def get_job( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -432,18 +471,20 @@ async def list_jobs( r"""Lists regions/{region}/jobs in a project. Args: - request (:class:`~.jobs.ListJobsRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.ListJobsRequest`): The request object. A request to list jobs in a project. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -465,6 +506,7 @@ async def list_jobs( status.state = ACTIVE AND labels.env = staging AND labels.starred = \* + This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -476,7 +518,7 @@ async def list_jobs( sent along with the request as metadata. Returns: - ~.pagers.ListJobsAsyncPager: + google.cloud.dataproc_v1beta2.services.job_controller.pagers.ListJobsAsyncPager: A list of jobs in a project. 
Iterating over this object will yield results and resolve additional pages @@ -518,6 +560,7 @@ async def list_jobs( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -546,7 +589,7 @@ async def update_job( r"""Updates a job in a project. Args: - request (:class:`~.jobs.UpdateJobRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.UpdateJobRequest`): The request object. A request to update a job. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -556,7 +599,7 @@ async def update_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1beta2.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -572,6 +615,7 @@ async def update_job( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -601,18 +645,20 @@ async def cancel_job( `regions/{region}/jobs.get `__. Args: - request (:class:`~.jobs.CancelJobRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.CancelJobRequest`): The request object. A request to cancel a job. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -629,7 +675,7 @@ async def cancel_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1beta2.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -667,6 +713,7 @@ async def cancel_job( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -693,18 +740,20 @@ async def delete_job( delete fails, and the response returns ``FAILED_PRECONDITION``. Args: - request (:class:`~.jobs.DeleteJobRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.DeleteJobRequest`): The request object. A request to delete a job. project_id (:class:`str`): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. region (:class:`str`): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
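
The pager surfaces renamed above resolve results lazily. A minimal, hedged sketch of driving the ``ListJobsAsyncPager`` (the project and region values are placeholders, and application-default credentials are assumed):

    import asyncio

    from google.cloud import dataproc_v1beta2


    async def main():
        client = dataproc_v1beta2.JobControllerAsyncClient()
        # list_jobs is a coroutine that resolves to a ListJobsAsyncPager;
        # iterating the pager fetches additional pages on demand.
        pager = await client.list_jobs(project_id="my-project", region="us-central1")
        async for job in pager:
            print(job.reference.job_id)


    asyncio.run(main())
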
@@ -751,6 +800,7 @@ async def delete_job( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/client.py b/google/cloud/dataproc_v1beta2/services/job_controller/client.py index 0989f37a..b487648e 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/client.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/client.py @@ -110,6 +110,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -122,7 +138,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + JobControllerClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -214,10 +230,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.JobControllerTransport]): The + transport (Union[str, JobControllerTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -253,21 +269,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. 
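        # Hedged summary of the endpoint selection below, following the
        # client_options docstring above (values of GOOGLE_API_USE_MTLS_ENDPOINT):
        #   "never"  -> always use the default endpoint
        #   "always" -> always use the mTLS endpoint
        #   "auto"   -> use the mTLS endpoint only when a client certificate
        #              source was resolved (is_mtls above)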
if client_options.api_endpoint is not None: @@ -310,7 +322,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -329,22 +341,24 @@ def submit_job( r"""Submits a job to a cluster. Args: - request (:class:`~.jobs.SubmitJobRequest`): + request (google.cloud.dataproc_v1beta2.types.SubmitJobRequest): The request object. A request to submit a job. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job (:class:`~.jobs.Job`): + job (google.cloud.dataproc_v1beta2.types.Job): Required. The job resource. This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this @@ -357,7 +371,7 @@ def submit_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1beta2.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -411,22 +425,24 @@ def submit_job_as_operation( r"""Submits job to a cluster. Args: - request (:class:`~.jobs.SubmitJobRequest`): + request (google.cloud.dataproc_v1beta2.types.SubmitJobRequest): The request object. A request to submit a job. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job (:class:`~.jobs.Job`): + job (google.cloud.dataproc_v1beta2.types.Job): Required. The job resource. This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this @@ -439,11 +455,12 @@ def submit_job_as_operation( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.jobs.Job``: A Dataproc job resource. + :class:`google.cloud.dataproc_v1beta2.types.Job` A + Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -506,23 +523,25 @@ def get_job( project. Args: - request (:class:`~.jobs.GetJobRequest`): + request (google.cloud.dataproc_v1beta2.types.GetJobRequest): The request object. A request to get the resource representation for a job in a project. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. 
+ This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job_id (:class:`str`): + job_id (str): Required. The job ID. This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -535,7 +554,7 @@ def get_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1beta2.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -589,22 +608,24 @@ def list_jobs( r"""Lists regions/{region}/jobs in a project. Args: - request (:class:`~.jobs.ListJobsRequest`): + request (google.cloud.dataproc_v1beta2.types.ListJobsRequest): The request object. A request to list jobs in a project. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - filter (:class:`str`): + filter (str): Optional. A filter constraining the jobs to list. Filters are case-sensitive and have the following syntax: @@ -622,6 +643,7 @@ def list_jobs( status.state = ACTIVE AND labels.env = staging AND labels.starred = \* + This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -633,7 +655,7 @@ def list_jobs( sent along with the request as metadata. Returns: - ~.pagers.ListJobsPager: + google.cloud.dataproc_v1beta2.services.job_controller.pagers.ListJobsPager: A list of jobs in a project. Iterating over this object will yield results and resolve additional pages @@ -694,7 +716,7 @@ def update_job( r"""Updates a job in a project. Args: - request (:class:`~.jobs.UpdateJobRequest`): + request (google.cloud.dataproc_v1beta2.types.UpdateJobRequest): The request object. A request to update a job. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -704,7 +726,7 @@ def update_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1beta2.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. @@ -744,22 +766,24 @@ def cancel_job( `regions/{region}/jobs.get `__. Args: - request (:class:`~.jobs.CancelJobRequest`): + request (google.cloud.dataproc_v1beta2.types.CancelJobRequest): The request object. A request to cancel a job. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job_id (:class:`str`): + job_id (str): Required. The job ID. This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -772,7 +796,7 @@ def cancel_job( sent along with the request as metadata. Returns: - ~.jobs.Job: + google.cloud.dataproc_v1beta2.types.Job: A Dataproc job resource. """ # Create or coerce a protobuf request object. 
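
``from_service_account_info``, added in this file, takes the already-parsed key dict, where ``from_service_account_file`` takes a path. A hedged sketch (the key filename and job identifiers are placeholders):

    import json

    from google.cloud import dataproc_v1beta2

    # Placeholder key file; the parsed dict is what from_service_account_info expects.
    with open("service-account.json") as f:
        info = json.load(f)

    client = dataproc_v1beta2.JobControllerClient.from_service_account_info(info)
    job = client.get_job(project_id="my-project", region="us-central1", job_id="my-job")
    print(job.status.state)
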
@@ -827,22 +851,24 @@ def delete_job( delete fails, and the response returns ``FAILED_PRECONDITION``. Args: - request (:class:`~.jobs.DeleteJobRequest`): + request (google.cloud.dataproc_v1beta2.types.DeleteJobRequest): The request object. A request to delete a job. - project_id (:class:`str`): + project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. + This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - region (:class:`str`): + region (str): Required. The Dataproc region in which to handle the request. + This corresponds to the ``region`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - job_id (:class:`str`): + job_id (str): Required. The job ID. This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/pagers.py b/google/cloud/dataproc_v1beta2/services/job_controller/pagers.py index 98cd30cb..1e104382 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/pagers.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.dataproc_v1beta2.types import jobs @@ -24,7 +33,7 @@ class ListJobsPager: """A pager for iterating through ``list_jobs`` requests. This class thinly wraps an initial - :class:`~.jobs.ListJobsResponse` object, and + :class:`google.cloud.dataproc_v1beta2.types.ListJobsResponse` object, and provides an ``__iter__`` method to iterate through its ``jobs`` field. @@ -33,7 +42,7 @@ class ListJobsPager: through the ``jobs`` field on the corresponding responses. - All the usual :class:`~.jobs.ListJobsResponse` + All the usual :class:`google.cloud.dataproc_v1beta2.types.ListJobsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.jobs.ListJobsRequest`): + request (google.cloud.dataproc_v1beta2.types.ListJobsRequest): The initial request object. - response (:class:`~.jobs.ListJobsResponse`): + response (google.cloud.dataproc_v1beta2.types.ListJobsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListJobsAsyncPager: """A pager for iterating through ``list_jobs`` requests. This class thinly wraps an initial - :class:`~.jobs.ListJobsResponse` object, and + :class:`google.cloud.dataproc_v1beta2.types.ListJobsResponse` object, and provides an ``__aiter__`` method to iterate through its ``jobs`` field. @@ -95,7 +104,7 @@ class ListJobsAsyncPager: through the ``jobs`` field on the corresponding responses. - All the usual :class:`~.jobs.ListJobsResponse` + All the usual :class:`google.cloud.dataproc_v1beta2.types.ListJobsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" @@ -113,9 +122,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.jobs.ListJobsRequest`): + request (google.cloud.dataproc_v1beta2.types.ListJobsRequest): The initial request object. - response (:class:`~.jobs.ListJobsResponse`): + response (google.cloud.dataproc_v1beta2.types.ListJobsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/transports/base.py b/google/cloud/dataproc_v1beta2/services/job_controller/transports/base.py index deea5d1c..7b47cb8f 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/transports/base.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/transports/base.py @@ -70,10 +70,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -81,6 +81,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -90,20 +93,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -114,6 +114,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -125,6 +126,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -140,6 +142,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -155,6 +158,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -166,6 +170,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -181,6 +186,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -192,6 +198,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc.py b/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc.py index 4eb0020e..78a1139b 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc.py @@ -60,6 +60,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -90,6 +91,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -104,72 +109,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. 
+ # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -177,18 +171,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -202,7 +186,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. 
credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc_asyncio.py index 1be3cb35..28be8d15 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc_asyncio.py @@ -64,7 +64,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -104,6 +104,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -135,12 +136,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -149,72 +154,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -222,18 +216,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/async_client.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/async_client.py index 71993784..5f2f82c1 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/async_client.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/async_client.py @@ -93,7 +93,36 @@ class WorkflowTemplateServiceAsyncClient: WorkflowTemplateServiceClient.parse_common_location_path ) - from_service_account_file = WorkflowTemplateServiceClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkflowTemplateServiceAsyncClient: The constructed client. + """ + return WorkflowTemplateServiceClient.from_service_account_info.__func__(WorkflowTemplateServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkflowTemplateServiceAsyncClient: The constructed client. + """ + return WorkflowTemplateServiceClient.from_service_account_file.__func__(WorkflowTemplateServiceAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -171,7 +200,7 @@ async def create_workflow_template( r"""Creates new workflow template. Args: - request (:class:`~.workflow_templates.CreateWorkflowTemplateRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.CreateWorkflowTemplateRequest`): The request object. A request to create a workflow template. parent (:class:`str`): @@ -187,12 +216,14 @@ async def create_workflow_template( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - template (:class:`~.workflow_templates.WorkflowTemplate`): + template (:class:`google.cloud.dataproc_v1beta2.types.WorkflowTemplate`): Required. The Dataproc workflow template to create. + This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -204,7 +235,7 @@ async def create_workflow_template( sent along with the request as metadata. Returns: - ~.workflow_templates.WorkflowTemplate: + google.cloud.dataproc_v1beta2.types.WorkflowTemplate: A Dataproc workflow template resource. @@ -238,6 +269,7 @@ async def create_workflow_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -269,7 +301,7 @@ async def get_workflow_template( specifying optional version parameter. Args: - request (:class:`~.workflow_templates.GetWorkflowTemplateRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.GetWorkflowTemplateRequest`): The request object. A request to fetch a workflow template. name (:class:`str`): @@ -286,6 +318,7 @@ async def get_workflow_template( resource name of the template has the following format: ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -297,7 +330,7 @@ async def get_workflow_template( sent along with the request as metadata. Returns: - ~.workflow_templates.WorkflowTemplate: + google.cloud.dataproc_v1beta2.types.WorkflowTemplate: A Dataproc workflow template resource. 
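Reviewer note on the hunk above: the async client previously aliased the sync classmethod (``from_service_account_file = WorkflowTemplateServiceClient.from_service_account_file``); it now defines real classmethods for both ``from_service_account_info`` and ``from_service_account_file``, re-binding the sync implementation via ``__func__`` so that ``cls`` resolves to the async class. A minimal usage sketch, assuming the package-level client re-export and a hypothetical key file:

```python
import json

from google.cloud import dataproc_v1beta2

# Construct the async client straight from a JSON key file on disk.
client = dataproc_v1beta2.WorkflowTemplateServiceAsyncClient.from_service_account_file(
    "service-account.json"  # hypothetical path
)

# Or from an already-parsed dict, e.g. key material pulled from a secret store.
with open("service-account.json") as f:  # hypothetical path
    info = json.load(f)
client = dataproc_v1beta2.WorkflowTemplateServiceAsyncClient.from_service_account_info(info)
```

``from_service_account_json`` remains an alias of ``from_service_account_file``, so existing callers are unaffected.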
@@ -333,6 +366,7 @@ async def get_workflow_template( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -385,7 +419,7 @@ async def instantiate_workflow_template( be [Empty][google.protobuf.Empty]. Args: - request (:class:`~.workflow_templates.InstantiateWorkflowTemplateRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.InstantiateWorkflowTemplateRequest`): The request object. A request to instantiate a workflow template. name (:class:`str`): @@ -404,14 +438,16 @@ async def instantiate_workflow_template( the resource name of the template has the following format: ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - parameters (:class:`Sequence[~.workflow_templates.InstantiateWorkflowTemplateRequest.ParametersEntry]`): + parameters (:class:`Sequence[google.cloud.dataproc_v1beta2.types.InstantiateWorkflowTemplateRequest.ParametersEntry]`): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 100 characters. + This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -423,24 +459,22 @@ async def instantiate_workflow_template( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -473,6 +507,7 @@ async def instantiate_workflow_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -536,7 +571,7 @@ async def instantiate_inline_workflow_template( be [Empty][google.protobuf.Empty]. Args: - request (:class:`~.workflow_templates.InstantiateInlineWorkflowTemplateRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.InstantiateInlineWorkflowTemplateRequest`): The request object. A request to instantiate an inline workflow template. parent (:class:`str`): @@ -554,12 +589,14 @@ async def instantiate_inline_workflow_template( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- template (:class:`~.workflow_templates.WorkflowTemplate`): + template (:class:`google.cloud.dataproc_v1beta2.types.WorkflowTemplate`): Required. The workflow template to instantiate. + This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -571,24 +608,22 @@ async def instantiate_inline_workflow_template( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -620,6 +655,7 @@ async def instantiate_inline_workflow_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -659,14 +695,15 @@ async def update_workflow_template( server version. Args: - request (:class:`~.workflow_templates.UpdateWorkflowTemplateRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.UpdateWorkflowTemplateRequest`): The request object. A request to update a workflow template. - template (:class:`~.workflow_templates.WorkflowTemplate`): + template (:class:`google.cloud.dataproc_v1beta2.types.WorkflowTemplate`): Required. The updated workflow template. The ``template.version`` field must match the current version. + This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -678,7 +715,7 @@ async def update_workflow_template( sent along with the request as metadata. Returns: - ~.workflow_templates.WorkflowTemplate: + google.cloud.dataproc_v1beta2.types.WorkflowTemplate: A Dataproc workflow template resource. @@ -710,6 +747,7 @@ async def update_workflow_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -742,7 +780,7 @@ async def list_workflow_templates( the request. Args: - request (:class:`~.workflow_templates.ListWorkflowTemplatesRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.ListWorkflowTemplatesRequest`): The request object. A request to list workflow templates in a project. parent (:class:`str`): @@ -758,6 +796,7 @@ async def list_workflow_templates( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
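Reviewer note: the reflowed ``Returns:`` blocks in the two instantiate hunks above describe the same contract: the call returns a long-running operation whose result type is ``Empty``, so completing without raising, not any payload, is the success signal. A sketch of the calling pattern under that contract (the resource name is hypothetical):

```python
from google.cloud import dataproc_v1beta2


async def instantiate(template_name: str) -> None:
    client = dataproc_v1beta2.WorkflowTemplateServiceAsyncClient()
    # Returns a google.api_core.operation_async.AsyncOperation.
    operation = await client.instantiate_workflow_template(name=template_name)
    # The result is google.protobuf.empty_pb2.Empty; finishing without
    # raising is the outcome that matters. Progress surfaces through the
    # operation's metadata (WorkflowMetadata per the service docs).
    await operation.result()


# e.g. asyncio.run(instantiate(
#     "projects/my-project/regions/us-central1/workflowTemplates/my-template"))
```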
@@ -769,7 +808,7 @@ async def list_workflow_templates( sent along with the request as metadata. Returns: - ~.pagers.ListWorkflowTemplatesAsyncPager: + google.cloud.dataproc_v1beta2.services.workflow_template_service.pagers.ListWorkflowTemplatesAsyncPager: A response to a request to list workflow templates in a project. Iterating over this object will yield @@ -808,6 +847,7 @@ async def list_workflow_templates( exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -844,7 +884,7 @@ async def delete_workflow_template( progress workflows. Args: - request (:class:`~.workflow_templates.DeleteWorkflowTemplateRequest`): + request (:class:`google.cloud.dataproc_v1beta2.types.DeleteWorkflowTemplateRequest`): The request object. A request to delete a workflow template. Currently started workflows will remain running. @@ -863,6 +903,7 @@ the resource name of the template has the following format: ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -900,6 +941,7 @@ maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/client.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/client.py index 5c473496..3c23cd89 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/client.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/client.py @@ -119,6 +119,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkflowTemplateServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -131,7 +147,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + WorkflowTemplateServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -257,10 +273,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.WorkflowTemplateServiceTransport]): The + transport (Union[str, WorkflowTemplateServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client.
It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -296,21 +312,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -353,7 +365,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -371,10 +383,10 @@ def create_workflow_template( r"""Creates new workflow template. Args: - request (:class:`~.workflow_templates.CreateWorkflowTemplateRequest`): + request (google.cloud.dataproc_v1beta2.types.CreateWorkflowTemplateRequest): The request object. A request to create a workflow template. - parent (:class:`str`): + parent (str): Required. The resource name of the region or location, as described in https://cloud.google.com/apis/design/resource_names. @@ -387,12 +399,14 @@ def create_workflow_template( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - template (:class:`~.workflow_templates.WorkflowTemplate`): + template (google.cloud.dataproc_v1beta2.types.WorkflowTemplate): Required. The Dataproc workflow template to create. + This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -404,7 +418,7 @@ def create_workflow_template( sent along with the request as metadata. Returns: - ~.workflow_templates.WorkflowTemplate: + google.cloud.dataproc_v1beta2.types.WorkflowTemplate: A Dataproc workflow template resource. @@ -464,10 +478,10 @@ def get_workflow_template( specifying optional version parameter. Args: - request (:class:`~.workflow_templates.GetWorkflowTemplateRequest`): + request (google.cloud.dataproc_v1beta2.types.GetWorkflowTemplateRequest): The request object. A request to fetch a workflow template. - name (:class:`str`): + name (str): Required. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. @@ -481,6 +495,7 @@ def get_workflow_template( resource name of the template has the following format: ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -492,7 +507,7 @@ def get_workflow_template( sent along with the request as metadata. 
Returns: - ~.workflow_templates.WorkflowTemplate: + google.cloud.dataproc_v1beta2.types.WorkflowTemplate: A Dataproc workflow template resource. @@ -571,10 +586,10 @@ def instantiate_workflow_template( be [Empty][google.protobuf.Empty]. Args: - request (:class:`~.workflow_templates.InstantiateWorkflowTemplateRequest`): + request (google.cloud.dataproc_v1beta2.types.InstantiateWorkflowTemplateRequest): The request object. A request to instantiate a workflow template. - name (:class:`str`): + name (str): Required. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. @@ -590,14 +605,16 @@ def instantiate_workflow_template( the resource name of the template has the following format: ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - parameters (:class:`Sequence[~.workflow_templates.InstantiateWorkflowTemplateRequest.ParametersEntry]`): + parameters (Sequence[google.cloud.dataproc_v1beta2.types.InstantiateWorkflowTemplateRequest.ParametersEntry]): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 100 characters. + This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -609,24 +626,22 @@ def instantiate_workflow_template( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -653,9 +668,8 @@ def instantiate_workflow_template( if name is not None: request.name = name - - if parameters: - request.parameters.update(parameters) + if parameters is not None: + request.parameters = parameters # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -721,10 +735,10 @@ def instantiate_inline_workflow_template( be [Empty][google.protobuf.Empty]. Args: - request (:class:`~.workflow_templates.InstantiateInlineWorkflowTemplateRequest`): + request (google.cloud.dataproc_v1beta2.types.InstantiateInlineWorkflowTemplateRequest): The request object. A request to instantiate an inline workflow template. - parent (:class:`str`): + parent (str): Required. The resource name of the region or location, as described in https://cloud.google.com/apis/design/resource_names. 
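Reviewer note: the ``__init__`` hunk in client.py earlier in this file's diff is the caller-facing half of the mTLS rework: instead of building ``grpc.ssl_channel_credentials`` itself, the client now forwards the certificate callback to the transport as ``client_cert_source_for_mtls``, falling back to ``mtls.default_client_cert_source()`` when one is available. A sketch of how a caller opts in, assuming hypothetical PEM files:

```python
import os

from google.api_core.client_options import ClientOptions
from google.cloud import dataproc_v1beta2


def client_cert_source():
    # Must return (cert_bytes, key_bytes), both PEM. Paths are hypothetical.
    with open("client-cert.pem", "rb") as cert, open("client-key.pem", "rb") as key:
        return cert.read(), key.read()


os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"  # opt in to mTLS
options = ClientOptions(client_cert_source=client_cert_source)
# __init__ now hands the callback itself to the transport as
# client_cert_source_for_mtls; the transport, not the client, builds
# grpc.ssl_channel_credentials from it.
client = dataproc_v1beta2.WorkflowTemplateServiceClient(client_options=options)
```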
@@ -739,12 +753,14 @@ def instantiate_inline_workflow_template( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - template (:class:`~.workflow_templates.WorkflowTemplate`): + template (google.cloud.dataproc_v1beta2.types.WorkflowTemplate): Required. The workflow template to instantiate. + This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -756,24 +772,22 @@ def instantiate_inline_workflow_template( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -845,14 +859,15 @@ def update_workflow_template( server version. Args: - request (:class:`~.workflow_templates.UpdateWorkflowTemplateRequest`): + request (google.cloud.dataproc_v1beta2.types.UpdateWorkflowTemplateRequest): The request object. A request to update a workflow template. - template (:class:`~.workflow_templates.WorkflowTemplate`): + template (google.cloud.dataproc_v1beta2.types.WorkflowTemplate): Required. The updated workflow template. The ``template.version`` field must match the current version. + This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -864,7 +879,7 @@ def update_workflow_template( sent along with the request as metadata. Returns: - ~.workflow_templates.WorkflowTemplate: + google.cloud.dataproc_v1beta2.types.WorkflowTemplate: A Dataproc workflow template resource. @@ -923,10 +938,10 @@ def list_workflow_templates( the request. Args: - request (:class:`~.workflow_templates.ListWorkflowTemplatesRequest`): + request (google.cloud.dataproc_v1beta2.types.ListWorkflowTemplatesRequest): The request object. A request to list workflow templates in a project. - parent (:class:`str`): + parent (str): Required. The resource name of the region or location, as described in https://cloud.google.com/apis/design/resource_names. @@ -939,6 +954,7 @@ def list_workflow_templates( the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -950,7 +966,7 @@ def list_workflow_templates( sent along with the request as metadata. 
Returns: - ~.pagers.ListWorkflowTemplatesPager: + google.cloud.dataproc_v1beta2.services.workflow_template_service.pagers.ListWorkflowTemplatesPager: A response to a request to list workflow templates in a project. Iterating over this object will yield @@ -1016,11 +1032,11 @@ def delete_workflow_template( progress workflows. Args: - request (:class:`~.workflow_templates.DeleteWorkflowTemplateRequest`): + request (google.cloud.dataproc_v1beta2.types.DeleteWorkflowTemplateRequest): The request object. A request to delete a workflow template. Currently started workflows will remain running. - name (:class:`str`): + name (str): Required. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. @@ -1035,6 +1051,7 @@ the resource name of the template has the following format: ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/pagers.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/pagers.py index 205f2657..83bfd1f9 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/pagers.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.dataproc_v1beta2.types import workflow_templates @@ -24,7 +33,7 @@ class ListWorkflowTemplatesPager: """A pager for iterating through ``list_workflow_templates`` requests. This class thinly wraps an initial - :class:`~.workflow_templates.ListWorkflowTemplatesResponse` object, and + :class:`google.cloud.dataproc_v1beta2.types.ListWorkflowTemplatesResponse` object, and provides an ``__iter__`` method to iterate through its ``templates`` field. @@ -33,7 +42,7 @@ class ListWorkflowTemplatesPager: through the ``templates`` field on the corresponding responses. - All the usual :class:`~.workflow_templates.ListWorkflowTemplatesResponse` + All the usual :class:`google.cloud.dataproc_v1beta2.types.ListWorkflowTemplatesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.workflow_templates.ListWorkflowTemplatesRequest`): + request (google.cloud.dataproc_v1beta2.types.ListWorkflowTemplatesRequest): The initial request object. - response (:class:`~.workflow_templates.ListWorkflowTemplatesResponse`): + response (google.cloud.dataproc_v1beta2.types.ListWorkflowTemplatesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListWorkflowTemplatesAsyncPager: """A pager for iterating through ``list_workflow_templates`` requests.
This class thinly wraps an initial - :class:`~.workflow_templates.ListWorkflowTemplatesResponse` object, and + :class:`google.cloud.dataproc_v1beta2.types.ListWorkflowTemplatesResponse` object, and provides an ``__aiter__`` method to iterate through its ``templates`` field. @@ -95,7 +104,7 @@ class ListWorkflowTemplatesAsyncPager: through the ``templates`` field on the corresponding responses. - All the usual :class:`~.workflow_templates.ListWorkflowTemplatesResponse` + All the usual :class:`google.cloud.dataproc_v1beta2.types.ListWorkflowTemplatesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -115,9 +124,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.workflow_templates.ListWorkflowTemplatesRequest`): + request (google.cloud.dataproc_v1beta2.types.ListWorkflowTemplatesRequest): The initial request object. - response (:class:`~.workflow_templates.ListWorkflowTemplatesResponse`): + response (google.cloud.dataproc_v1beta2.types.ListWorkflowTemplatesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/base.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/base.py index 2495d556..40ff6b61 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/base.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/base.py @@ -70,10 +70,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -81,6 +81,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -90,20 +93,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -114,6 +114,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -129,6 +130,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -140,6 +142,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -151,6 +154,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -162,6 +166,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -177,6 +182,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.InternalServerError, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -188,6 +194,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc.py index 8de2cce0..4d514e2d 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc.py @@ -61,6 +61,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -91,6 +92,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -105,72 +110,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -178,18 +172,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. 
- super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -203,7 +187,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc_asyncio.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc_asyncio.py index 81ffd56b..b02a4e15 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc_asyncio.py @@ -65,7 +65,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -105,6 +105,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -136,12 +137,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -150,72 +155,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -223,18 +217,8 @@ def __init__( ], ) - # Run the base constructor. 
- super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/dataproc_v1beta2/types/__init__.py b/google/cloud/dataproc_v1beta2/types/__init__.py index 23523cce..c640c043 100644 --- a/google/cloud/dataproc_v1beta2/types/__init__.py +++ b/google/cloud/dataproc_v1beta2/types/__init__.py @@ -19,181 +19,181 @@ AutoscalingPolicy, BasicAutoscalingAlgorithm, BasicYarnAutoscalingConfig, - InstanceGroupAutoscalingPolicyConfig, CreateAutoscalingPolicyRequest, - GetAutoscalingPolicyRequest, - UpdateAutoscalingPolicyRequest, DeleteAutoscalingPolicyRequest, + GetAutoscalingPolicyRequest, + InstanceGroupAutoscalingPolicyConfig, ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse, + UpdateAutoscalingPolicyRequest, ) from .clusters import ( + AcceleratorConfig, + AutoscalingConfig, Cluster, ClusterConfig, - GkeClusterConfig, - EndpointConfig, - AutoscalingConfig, - EncryptionConfig, - GceClusterConfig, - InstanceGroupConfig, - ManagedGroupConfig, - AcceleratorConfig, - DiskConfig, - LifecycleConfig, - SecurityConfig, - KerberosConfig, - NodeInitializationAction, - ClusterStatus, - SoftwareConfig, ClusterMetrics, + ClusterStatus, CreateClusterRequest, - UpdateClusterRequest, DeleteClusterRequest, + DiagnoseClusterRequest, + DiagnoseClusterResults, + DiskConfig, + EncryptionConfig, + EndpointConfig, + GceClusterConfig, GetClusterRequest, + GkeClusterConfig, + InstanceGroupConfig, + KerberosConfig, + LifecycleConfig, ListClustersRequest, ListClustersResponse, - DiagnoseClusterRequest, - DiagnoseClusterResults, + ManagedGroupConfig, + NodeInitializationAction, ReservationAffinity, + SecurityConfig, + SoftwareConfig, + UpdateClusterRequest, ) from .jobs import ( - LoggingConfig, + CancelJobRequest, + DeleteJobRequest, + GetJobRequest, HadoopJob, - SparkJob, - PySparkJob, - QueryList, HiveJob, - SparkSqlJob, - PigJob, - SparkRJob, - PrestoJob, + Job, + JobMetadata, JobPlacement, - JobStatus, JobReference, - YarnApplication, - Job, JobScheduling, - JobMetadata, - SubmitJobRequest, - GetJobRequest, + JobStatus, ListJobsRequest, - UpdateJobRequest, ListJobsResponse, - CancelJobRequest, - DeleteJobRequest, + LoggingConfig, + PigJob, + PrestoJob, + PySparkJob, + QueryList, + SparkJob, + SparkRJob, + SparkSqlJob, + SubmitJobRequest, + UpdateJobRequest, + YarnApplication, ) from .operations import ( - ClusterOperationStatus, ClusterOperationMetadata, + ClusterOperationStatus, ) from .workflow_templates import ( - WorkflowTemplate, - WorkflowTemplatePlacement, - ManagedCluster, + ClusterOperation, ClusterSelector, + CreateWorkflowTemplateRequest, + DeleteWorkflowTemplateRequest, + GetWorkflowTemplateRequest, + InstantiateInlineWorkflowTemplateRequest, + InstantiateWorkflowTemplateRequest, + ListWorkflowTemplatesRequest, + ListWorkflowTemplatesResponse, + ManagedCluster, OrderedJob, - TemplateParameter, ParameterValidation, RegexValidation, + TemplateParameter, + UpdateWorkflowTemplateRequest, ValueValidation, - WorkflowMetadata, - ClusterOperation, WorkflowGraph, + WorkflowMetadata, WorkflowNode, - CreateWorkflowTemplateRequest, - GetWorkflowTemplateRequest, - InstantiateWorkflowTemplateRequest, - 
InstantiateInlineWorkflowTemplateRequest, - UpdateWorkflowTemplateRequest, - ListWorkflowTemplatesRequest, - ListWorkflowTemplatesResponse, - DeleteWorkflowTemplateRequest, + WorkflowTemplate, + WorkflowTemplatePlacement, ) __all__ = ( "AutoscalingPolicy", "BasicAutoscalingAlgorithm", "BasicYarnAutoscalingConfig", - "InstanceGroupAutoscalingPolicyConfig", "CreateAutoscalingPolicyRequest", - "GetAutoscalingPolicyRequest", - "UpdateAutoscalingPolicyRequest", "DeleteAutoscalingPolicyRequest", + "GetAutoscalingPolicyRequest", + "InstanceGroupAutoscalingPolicyConfig", "ListAutoscalingPoliciesRequest", "ListAutoscalingPoliciesResponse", - "Component", + "UpdateAutoscalingPolicyRequest", + "AcceleratorConfig", + "AutoscalingConfig", "Cluster", "ClusterConfig", - "GkeClusterConfig", - "EndpointConfig", - "AutoscalingConfig", - "EncryptionConfig", - "GceClusterConfig", - "InstanceGroupConfig", - "ManagedGroupConfig", - "AcceleratorConfig", - "DiskConfig", - "LifecycleConfig", - "SecurityConfig", - "KerberosConfig", - "NodeInitializationAction", - "ClusterStatus", - "SoftwareConfig", "ClusterMetrics", + "ClusterStatus", "CreateClusterRequest", - "UpdateClusterRequest", "DeleteClusterRequest", + "DiagnoseClusterRequest", + "DiagnoseClusterResults", + "DiskConfig", + "EncryptionConfig", + "EndpointConfig", + "GceClusterConfig", "GetClusterRequest", + "GkeClusterConfig", + "InstanceGroupConfig", + "KerberosConfig", + "LifecycleConfig", "ListClustersRequest", "ListClustersResponse", - "DiagnoseClusterRequest", - "DiagnoseClusterResults", + "ManagedGroupConfig", + "NodeInitializationAction", "ReservationAffinity", - "LoggingConfig", + "SecurityConfig", + "SoftwareConfig", + "UpdateClusterRequest", + "CancelJobRequest", + "DeleteJobRequest", + "GetJobRequest", "HadoopJob", - "SparkJob", - "PySparkJob", - "QueryList", "HiveJob", - "SparkSqlJob", - "PigJob", - "SparkRJob", - "PrestoJob", + "Job", + "JobMetadata", "JobPlacement", - "JobStatus", "JobReference", - "YarnApplication", - "Job", "JobScheduling", - "JobMetadata", - "SubmitJobRequest", - "GetJobRequest", + "JobStatus", "ListJobsRequest", - "UpdateJobRequest", "ListJobsResponse", - "CancelJobRequest", - "DeleteJobRequest", - "ClusterOperationStatus", + "LoggingConfig", + "PigJob", + "PrestoJob", + "PySparkJob", + "QueryList", + "SparkJob", + "SparkRJob", + "SparkSqlJob", + "SubmitJobRequest", + "UpdateJobRequest", + "YarnApplication", "ClusterOperationMetadata", - "WorkflowTemplate", - "WorkflowTemplatePlacement", - "ManagedCluster", + "ClusterOperationStatus", + "Component", + "ClusterOperation", "ClusterSelector", + "CreateWorkflowTemplateRequest", + "DeleteWorkflowTemplateRequest", + "GetWorkflowTemplateRequest", + "InstantiateInlineWorkflowTemplateRequest", + "InstantiateWorkflowTemplateRequest", + "ListWorkflowTemplatesRequest", + "ListWorkflowTemplatesResponse", + "ManagedCluster", "OrderedJob", - "TemplateParameter", "ParameterValidation", "RegexValidation", + "TemplateParameter", + "UpdateWorkflowTemplateRequest", "ValueValidation", - "WorkflowMetadata", - "ClusterOperation", "WorkflowGraph", + "WorkflowMetadata", "WorkflowNode", - "CreateWorkflowTemplateRequest", - "GetWorkflowTemplateRequest", - "InstantiateWorkflowTemplateRequest", - "InstantiateInlineWorkflowTemplateRequest", - "UpdateWorkflowTemplateRequest", - "ListWorkflowTemplatesRequest", - "ListWorkflowTemplatesResponse", - "DeleteWorkflowTemplateRequest", + "WorkflowTemplate", + "WorkflowTemplatePlacement", ) diff --git a/google/cloud/dataproc_v1beta2/types/autoscaling_policies.py 
b/google/cloud/dataproc_v1beta2/types/autoscaling_policies.py index 1a3c408f..ccc13b13 100644 --- a/google/cloud/dataproc_v1beta2/types/autoscaling_policies.py +++ b/google/cloud/dataproc_v1beta2/types/autoscaling_policies.py @@ -62,12 +62,12 @@ class AutoscalingPolicy(proto.Message): - For ``projects.locations.autoscalingPolicies``, the resource name of the policy has the following format: ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` - basic_algorithm (~.autoscaling_policies.BasicAutoscalingAlgorithm): + basic_algorithm (google.cloud.dataproc_v1beta2.types.BasicAutoscalingAlgorithm): - worker_config (~.autoscaling_policies.InstanceGroupAutoscalingPolicyConfig): + worker_config (google.cloud.dataproc_v1beta2.types.InstanceGroupAutoscalingPolicyConfig): Required. Describes how the autoscaler will operate for primary workers. - secondary_worker_config (~.autoscaling_policies.InstanceGroupAutoscalingPolicyConfig): + secondary_worker_config (google.cloud.dataproc_v1beta2.types.InstanceGroupAutoscalingPolicyConfig): Optional. Describes how the autoscaler will operate for secondary workers. """ @@ -93,9 +93,9 @@ class BasicAutoscalingAlgorithm(proto.Message): r"""Basic algorithm for autoscaling. Attributes: - yarn_config (~.autoscaling_policies.BasicYarnAutoscalingConfig): + yarn_config (google.cloud.dataproc_v1beta2.types.BasicYarnAutoscalingConfig): Required. YARN autoscaling configuration. - cooldown_period (~.duration.Duration): + cooldown_period (google.protobuf.duration_pb2.Duration): Optional. Duration between scaling events. A scaling period starts after the update operation from the previous event has completed. @@ -114,7 +114,7 @@ class BasicYarnAutoscalingConfig(proto.Message): r"""Basic autoscaling configurations for YARN. Attributes: - graceful_decommission_timeout (~.duration.Duration): + graceful_decommission_timeout (google.protobuf.duration_pb2.Duration): Required. Timeout for YARN graceful decommissioning of Node Managers. Specifies the duration to wait for jobs to complete before forcefully removing workers (and potentially @@ -244,7 +244,7 @@ class CreateAutoscalingPolicyRequest(proto.Message): - For ``projects.locations.autoscalingPolicies.create``, the resource name has the following format: ``projects/{project_id}/locations/{location}`` - policy (~.autoscaling_policies.AutoscalingPolicy): + policy (google.cloud.dataproc_v1beta2.types.AutoscalingPolicy): Required. The autoscaling policy to create. """ @@ -278,7 +278,7 @@ class UpdateAutoscalingPolicyRequest(proto.Message): r"""A request to update an autoscaling policy. Attributes: - policy (~.autoscaling_policies.AutoscalingPolicy): + policy (google.cloud.dataproc_v1beta2.types.AutoscalingPolicy): Required. The updated autoscaling policy. """ @@ -346,7 +346,7 @@ class ListAutoscalingPoliciesResponse(proto.Message): project. Attributes: - policies (Sequence[~.autoscaling_policies.AutoscalingPolicy]): + policies (Sequence[google.cloud.dataproc_v1beta2.types.AutoscalingPolicy]): Output only. Autoscaling policies list. next_page_token (str): Output only. This token is included in the diff --git a/google/cloud/dataproc_v1beta2/types/clusters.py b/google/cloud/dataproc_v1beta2/types/clusters.py index 97903e6f..71047f01 100644 --- a/google/cloud/dataproc_v1beta2/types/clusters.py +++ b/google/cloud/dataproc_v1beta2/types/clusters.py @@ -70,11 +70,11 @@ class Cluster(proto.Message): Required. The cluster name. Cluster names within a project must be unique. Names of deleted clusters can be reused. 
- config (~.gcd_clusters.ClusterConfig): + config (google.cloud.dataproc_v1beta2.types.ClusterConfig): Required. The cluster config. Note that Dataproc may set default values, and values may change when clusters are updated. - labels (Sequence[~.gcd_clusters.Cluster.LabelsEntry]): + labels (Sequence[google.cloud.dataproc_v1beta2.types.Cluster.LabelsEntry]): Optional. The labels to associate with this cluster. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. @@ -82,15 +82,15 @@ class Cluster(proto.Message): 1 to 63 characters, and must conform to `RFC 1035 `__. No more than 32 labels can be associated with a cluster. - status (~.gcd_clusters.ClusterStatus): + status (google.cloud.dataproc_v1beta2.types.ClusterStatus): Output only. Cluster status. - status_history (Sequence[~.gcd_clusters.ClusterStatus]): + status_history (Sequence[google.cloud.dataproc_v1beta2.types.ClusterStatus]): Output only. The previous cluster status. cluster_uuid (str): Output only. A cluster UUID (Unique Universal Identifier). Dataproc generates this value when it creates the cluster. - metrics (~.gcd_clusters.ClusterMetrics): + metrics (google.cloud.dataproc_v1beta2.types.ClusterMetrics): Output only. Contains cluster daemon metrics such as HDFS and YARN stats. @@ -143,25 +143,25 @@ class ClusterConfig(proto.Message): project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket. - gce_cluster_config (~.gcd_clusters.GceClusterConfig): + gce_cluster_config (google.cloud.dataproc_v1beta2.types.GceClusterConfig): Optional. The shared Compute Engine config settings for all instances in a cluster. - master_config (~.gcd_clusters.InstanceGroupConfig): + master_config (google.cloud.dataproc_v1beta2.types.InstanceGroupConfig): Optional. The Compute Engine config settings for the master instance in a cluster. - worker_config (~.gcd_clusters.InstanceGroupConfig): + worker_config (google.cloud.dataproc_v1beta2.types.InstanceGroupConfig): Optional. The Compute Engine config settings for worker instances in a cluster. - secondary_worker_config (~.gcd_clusters.InstanceGroupConfig): + secondary_worker_config (google.cloud.dataproc_v1beta2.types.InstanceGroupConfig): Optional. The Compute Engine config settings for additional worker instances in a cluster. - software_config (~.gcd_clusters.SoftwareConfig): + software_config (google.cloud.dataproc_v1beta2.types.SoftwareConfig): Optional. The config settings for software inside the cluster. - lifecycle_config (~.gcd_clusters.LifecycleConfig): + lifecycle_config (google.cloud.dataproc_v1beta2.types.LifecycleConfig): Optional. The config setting for auto delete cluster schedule. - initialization_actions (Sequence[~.gcd_clusters.NodeInitializationAction]): + initialization_actions (Sequence[google.cloud.dataproc_v1beta2.types.NodeInitializationAction]): Optional. Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's role metadata to run an @@ -177,19 +177,19 @@ class ClusterConfig(proto.Message): else ... worker specific actions ... fi - encryption_config (~.gcd_clusters.EncryptionConfig): + encryption_config (google.cloud.dataproc_v1beta2.types.EncryptionConfig): Optional. Encryption settings for the cluster. - autoscaling_config (~.gcd_clusters.AutoscalingConfig): + autoscaling_config (google.cloud.dataproc_v1beta2.types.AutoscalingConfig): Optional. 
Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset. - endpoint_config (~.gcd_clusters.EndpointConfig): + endpoint_config (google.cloud.dataproc_v1beta2.types.EndpointConfig): Optional. Port/endpoint configuration for this cluster - security_config (~.gcd_clusters.SecurityConfig): + security_config (google.cloud.dataproc_v1beta2.types.SecurityConfig): Optional. Security related configuration. - gke_cluster_config (~.gcd_clusters.GkeClusterConfig): + gke_cluster_config (google.cloud.dataproc_v1beta2.types.GkeClusterConfig): Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes. Setting this is considered mutually exclusive with Compute Engine-based options such as @@ -245,7 +245,7 @@ class GkeClusterConfig(proto.Message): r"""The GKE config for this cluster. Attributes: - namespaced_gke_deployment_target (~.gcd_clusters.GkeClusterConfig.NamespacedGkeDeploymentTarget): + namespaced_gke_deployment_target (google.cloud.dataproc_v1beta2.types.GkeClusterConfig.NamespacedGkeDeploymentTarget): Optional. A target for the deployment. """ @@ -275,7 +275,7 @@ class EndpointConfig(proto.Message): r"""Endpoint config for this cluster Attributes: - http_ports (Sequence[~.gcd_clusters.EndpointConfig.HttpPortsEntry]): + http_ports (Sequence[google.cloud.dataproc_v1beta2.types.EndpointConfig.HttpPortsEntry]): Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true. enable_http_port_access (bool): @@ -403,11 +403,11 @@ class GceClusterConfig(proto.Message): The Compute Engine tags to add to all instances (see `Tagging instances `__). - metadata (Sequence[~.gcd_clusters.GceClusterConfig.MetadataEntry]): + metadata (Sequence[google.cloud.dataproc_v1beta2.types.GceClusterConfig.MetadataEntry]): The Compute Engine metadata entries to add to all instances (see `Project and instance metadata `__). - reservation_affinity (~.gcd_clusters.ReservationAffinity): + reservation_affinity (google.cloud.dataproc_v1beta2.types.ReservationAffinity): Optional. Reservation Affinity for consuming Zonal reservation. """ @@ -481,12 +481,12 @@ class InstanceGroupConfig(proto.Message): Placement `__ feature, you must use the short name of the machine type resource, for example, ``n1-standard-2``. - disk_config (~.gcd_clusters.DiskConfig): + disk_config (google.cloud.dataproc_v1beta2.types.DiskConfig): Optional. Disk option config settings. is_preemptible (bool): Output only. Specifies that this instance group contains preemptible instances. - preemptibility (~.gcd_clusters.InstanceGroupConfig.Preemptibility): + preemptibility (google.cloud.dataproc_v1beta2.types.InstanceGroupConfig.Preemptibility): Optional. Specifies the preemptibility of the instance group. @@ -495,12 +495,12 @@ class InstanceGroupConfig(proto.Message): The default value for secondary instances is ``PREEMPTIBLE``. - managed_group_config (~.gcd_clusters.ManagedGroupConfig): + managed_group_config (google.cloud.dataproc_v1beta2.types.ManagedGroupConfig): Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups. - accelerators (Sequence[~.gcd_clusters.AcceleratorConfig]): + accelerators (Sequence[google.cloud.dataproc_v1beta2.types.AcceleratorConfig]): Optional. The Compute Engine accelerator configuration for these instances. 
min_cpu_platform (str): @@ -627,24 +627,24 @@ class LifecycleConfig(proto.Message): r"""Specifies the cluster auto-delete schedule configuration. Attributes: - idle_delete_ttl (~.duration.Duration): + idle_delete_ttl (google.protobuf.duration_pb2.Duration): Optional. The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of `Duration `__). - auto_delete_time (~.timestamp.Timestamp): + auto_delete_time (google.protobuf.timestamp_pb2.Timestamp): Optional. The time when cluster will be auto-deleted. (see JSON representation of `Timestamp `__). - auto_delete_ttl (~.duration.Duration): + auto_delete_ttl (google.protobuf.duration_pb2.Duration): Optional. The lifetime duration of cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of `Duration `__). - idle_start_time (~.timestamp.Timestamp): + idle_start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when cluster became idle (most recent job finished) and became eligible for deletion due to idleness (see JSON representation of @@ -669,7 +669,7 @@ class SecurityConfig(proto.Message): Kerberos, etc. Attributes: - kerberos_config (~.gcd_clusters.KerberosConfig): + kerberos_config (google.cloud.dataproc_v1beta2.types.KerberosConfig): Kerberos related configuration. """ @@ -790,7 +790,7 @@ class NodeInitializationAction(proto.Message): executable_file (str): Required. Cloud Storage URI of executable file. - execution_timeout (~.duration.Duration): + execution_timeout (google.protobuf.duration_pb2.Duration): Optional. Amount of time executable has to complete. Default is 10 minutes (see JSON representation of `Duration `__). @@ -810,16 +810,16 @@ class ClusterStatus(proto.Message): r"""The status of a cluster and its instances. Attributes: - state (~.gcd_clusters.ClusterStatus.State): + state (google.cloud.dataproc_v1beta2.types.ClusterStatus.State): Output only. The cluster's state. detail (str): Output only. Optional details of cluster's state. - state_start_time (~.timestamp.Timestamp): + state_start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when this state was entered (see JSON representation of `Timestamp `__). - substate (~.gcd_clusters.ClusterStatus.Substate): + substate (google.cloud.dataproc_v1beta2.types.ClusterStatus.Substate): Output only. Additional state information that includes status reported by the agent. """ @@ -866,7 +866,7 @@ class SoftwareConfig(proto.Message): "1.2.29"), or the `"preview" version `__. If unspecified, it defaults to the latest Debian version. - properties (Sequence[~.gcd_clusters.SoftwareConfig.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1beta2.types.SoftwareConfig.PropertiesEntry]): Optional. The properties to set on daemon config files. Property keys are specified in ``prefix:property`` format, @@ -885,7 +885,7 @@ class SoftwareConfig(proto.Message): For more information, see `Cluster properties `__. - optional_components (Sequence[~.shared.Component]): + optional_components (Sequence[google.cloud.dataproc_v1beta2.types.Component]): The set of optional components to activate on the cluster. """ @@ -906,9 +906,9 @@ class ClusterMetrics(proto.Message): only. It may be changed before final release.
Attributes: - hdfs_metrics (Sequence[~.gcd_clusters.ClusterMetrics.HdfsMetricsEntry]): + hdfs_metrics (Sequence[google.cloud.dataproc_v1beta2.types.ClusterMetrics.HdfsMetricsEntry]): The HDFS metrics. - yarn_metrics (Sequence[~.gcd_clusters.ClusterMetrics.YarnMetricsEntry]): + yarn_metrics (Sequence[google.cloud.dataproc_v1beta2.types.ClusterMetrics.YarnMetricsEntry]): The YARN metrics. """ @@ -927,7 +927,7 @@ class CreateClusterRequest(proto.Message): region (str): Required. The Dataproc region in which to handle the request. - cluster (~.gcd_clusters.Cluster): + cluster (google.cloud.dataproc_v1beta2.types.Cluster): Required. The cluster to create. request_id (str): Optional. A unique id used to identify the request. If the @@ -967,9 +967,9 @@ class UpdateClusterRequest(proto.Message): handle the request. cluster_name (str): Required. The cluster name. - cluster (~.gcd_clusters.Cluster): + cluster (google.cloud.dataproc_v1beta2.types.Cluster): Required. The changes to the cluster. - graceful_decommission_timeout (~.duration.Duration): + graceful_decommission_timeout (google.protobuf.duration_pb2.Duration): Optional. Timeout for graceful YARN decommissioning. Graceful decommissioning allows removing nodes from the cluster without interrupting jobs in progress. Timeout specifies how @@ -981,7 +981,7 @@ class UpdateClusterRequest(proto.Message): `Duration `__). Only supported on Dataproc image versions 1.2 and higher. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Specifies the path, relative to ``Cluster``, of the field to update. For example, to change the number of workers in a cluster to 5, the ``update_mask`` parameter @@ -1203,7 +1203,7 @@ class ListClustersResponse(proto.Message): r"""The list of all clusters in a project. Attributes: - clusters (Sequence[~.gcd_clusters.Cluster]): + clusters (Sequence[google.cloud.dataproc_v1beta2.types.Cluster]): Output only. The clusters in the project. next_page_token (str): Output only. This token is included in the response if there @@ -1260,7 +1260,7 @@ class ReservationAffinity(proto.Message): r"""Reservation Affinity for consuming Zonal reservation. Attributes: - consume_reservation_type (~.gcd_clusters.ReservationAffinity.Type): + consume_reservation_type (google.cloud.dataproc_v1beta2.types.ReservationAffinity.Type): Optional. Type of reservation to consume key (str): Optional. Corresponds to the label key of diff --git a/google/cloud/dataproc_v1beta2/types/jobs.py b/google/cloud/dataproc_v1beta2/types/jobs.py index c3b57d43..3b1f50b5 100644 --- a/google/cloud/dataproc_v1beta2/types/jobs.py +++ b/google/cloud/dataproc_v1beta2/types/jobs.py @@ -57,7 +57,7 @@ class LoggingConfig(proto.Message): r"""The runtime logging config of the job. Attributes: - driver_log_levels (Sequence[~.gcd_jobs.LoggingConfig.DriverLogLevelsEntry]): + driver_log_levels (Sequence[google.cloud.dataproc_v1beta2.types.LoggingConfig.DriverLogLevelsEntry]): The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: @@ -120,13 +120,13 @@ class HadoopJob(proto.Message): extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip. - properties (Sequence[~.gcd_jobs.HadoopJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1beta2.types.HadoopJob.PropertiesEntry]): Optional. A mapping of property names to values, used to configure Hadoop.
Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code. - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1beta2.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -182,14 +182,14 @@ class SparkJob(proto.Message): extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. - properties (Sequence[~.gcd_jobs.SparkJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1beta2.types.SparkJob.PropertiesEntry]): Optional. A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1beta2.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -241,7 +241,7 @@ class PySparkJob(proto.Message): extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. - properties (Sequence[~.gcd_jobs.PySparkJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1beta2.types.PySparkJob.PropertiesEntry]): Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc @@ -249,7 +249,7 @@ class PySparkJob(proto.Message): set in /etc/spark/conf/spark-defaults.conf and classes in user code. - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1beta2.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -306,16 +306,16 @@ class HiveJob(proto.Message): query_file_uri (str): The HCFS URI of the script that contains Hive queries. - query_list (~.gcd_jobs.QueryList): + query_list (google.cloud.dataproc_v1beta2.types.QueryList): A list of queries. continue_on_failure (bool): Optional. Whether to continue executing queries if a query fails. The default value is ``false``. Setting to ``true`` can be useful when executing independent parallel queries. - script_variables (Sequence[~.gcd_jobs.HiveJob.ScriptVariablesEntry]): + script_variables (Sequence[google.cloud.dataproc_v1beta2.types.HiveJob.ScriptVariablesEntry]): Optional. Mapping of query variable names to values (equivalent to the Hive command: ``SET name="value";``). - properties (Sequence[~.gcd_jobs.HiveJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1beta2.types.HiveJob.PropertiesEntry]): Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties @@ -351,13 +351,13 @@ class SparkSqlJob(proto.Message): query_file_uri (str): The HCFS URI of the script that contains SQL queries. - query_list (~.gcd_jobs.QueryList): + query_list (google.cloud.dataproc_v1beta2.types.QueryList): A list of queries. - script_variables (Sequence[~.gcd_jobs.SparkSqlJob.ScriptVariablesEntry]): + script_variables (Sequence[google.cloud.dataproc_v1beta2.types.SparkSqlJob.ScriptVariablesEntry]): Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET ``name="value";``). 
- properties (Sequence[~.gcd_jobs.SparkSqlJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1beta2.types.SparkSqlJob.PropertiesEntry]): Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the @@ -365,7 +365,7 @@ class SparkSqlJob(proto.Message): jar_file_uris (Sequence[str]): Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1beta2.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -393,16 +393,16 @@ class PigJob(proto.Message): query_file_uri (str): The HCFS URI of the script that contains the Pig queries. - query_list (~.gcd_jobs.QueryList): + query_list (google.cloud.dataproc_v1beta2.types.QueryList): A list of queries. continue_on_failure (bool): Optional. Whether to continue executing queries if a query fails. The default value is ``false``. Setting to ``true`` can be useful when executing independent parallel queries. - script_variables (Sequence[~.gcd_jobs.PigJob.ScriptVariablesEntry]): + script_variables (Sequence[google.cloud.dataproc_v1beta2.types.PigJob.ScriptVariablesEntry]): Optional. Mapping of query variable names to values (equivalent to the Pig command: ``name=[value]``). - properties (Sequence[~.gcd_jobs.PigJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1beta2.types.PigJob.PropertiesEntry]): Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties @@ -412,7 +412,7 @@ class PigJob(proto.Message): Optional. HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1beta2.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -457,7 +457,7 @@ class SparkRJob(proto.Message): extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. - properties (Sequence[~.gcd_jobs.SparkRJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1beta2.types.SparkRJob.PropertiesEntry]): Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc @@ -465,7 +465,7 @@ class SparkRJob(proto.Message): set in /etc/spark/conf/spark-defaults.conf and classes in user code. - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1beta2.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -494,7 +494,7 @@ class PrestoJob(proto.Message): query_file_uri (str): The HCFS URI of the script that contains SQL queries. - query_list (~.gcd_jobs.QueryList): + query_list (google.cloud.dataproc_v1beta2.types.QueryList): A list of queries. continue_on_failure (bool): Optional. Whether to continue executing queries if a query @@ -507,12 +507,12 @@ class PrestoJob(proto.Message): client_tags (Sequence[str]): Optional. Presto client tags to attach to this query - properties (Sequence[~.gcd_jobs.PrestoJob.PropertiesEntry]): + properties (Sequence[google.cloud.dataproc_v1beta2.types.PrestoJob.PropertiesEntry]): Optional. A mapping of property names to values. 
Used to set Presto `session properties `__ Equivalent to using the --session flag in the Presto CLI - logging_config (~.gcd_jobs.LoggingConfig): + logging_config (google.cloud.dataproc_v1beta2.types.LoggingConfig): Optional. The runtime log config for job execution. """ @@ -555,17 +555,17 @@ class JobStatus(proto.Message): r"""Dataproc job status. Attributes: - state (~.gcd_jobs.JobStatus.State): + state (google.cloud.dataproc_v1beta2.types.JobStatus.State): Output only. A state message specifying the overall job state. details (str): Output only. Optional Job state details, such as an error description if the state is ERROR. - state_start_time (~.timestamp.Timestamp): + state_start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when this state was entered. - substate (~.gcd_jobs.JobStatus.Substate): + substate (google.cloud.dataproc_v1beta2.types.JobStatus.Substate): Output only. Additional state information, which includes status reported by the agent. """ @@ -635,7 +635,7 @@ class YarnApplication(proto.Message): Attributes: name (str): Output only. The application name. - state (~.gcd_jobs.YarnApplication.State): + state (google.cloud.dataproc_v1beta2.types.YarnApplication.State): Output only. The application state. progress (float): Output only. The numerical progress of the @@ -676,37 +676,37 @@ class Job(proto.Message): r"""A Dataproc job resource. Attributes: - reference (~.gcd_jobs.JobReference): + reference (google.cloud.dataproc_v1beta2.types.JobReference): Optional. The fully qualified reference to the job, which can be used to obtain the equivalent REST path of the job resource. If this property is not specified when a job is created, the server generates a job_id. - placement (~.gcd_jobs.JobPlacement): + placement (google.cloud.dataproc_v1beta2.types.JobPlacement): Required. Job information, including how, when, and where to run the job. - hadoop_job (~.gcd_jobs.HadoopJob): + hadoop_job (google.cloud.dataproc_v1beta2.types.HadoopJob): Optional. Job is a Hadoop job. - spark_job (~.gcd_jobs.SparkJob): + spark_job (google.cloud.dataproc_v1beta2.types.SparkJob): Optional. Job is a Spark job. - pyspark_job (~.gcd_jobs.PySparkJob): + pyspark_job (google.cloud.dataproc_v1beta2.types.PySparkJob): Optional. Job is a PySpark job. - hive_job (~.gcd_jobs.HiveJob): + hive_job (google.cloud.dataproc_v1beta2.types.HiveJob): Optional. Job is a Hive job. - pig_job (~.gcd_jobs.PigJob): + pig_job (google.cloud.dataproc_v1beta2.types.PigJob): Optional. Job is a Pig job. - spark_r_job (~.gcd_jobs.SparkRJob): + spark_r_job (google.cloud.dataproc_v1beta2.types.SparkRJob): Optional. Job is a SparkR job. - spark_sql_job (~.gcd_jobs.SparkSqlJob): + spark_sql_job (google.cloud.dataproc_v1beta2.types.SparkSqlJob): Optional. Job is a SparkSql job. - presto_job (~.gcd_jobs.PrestoJob): + presto_job (google.cloud.dataproc_v1beta2.types.PrestoJob): Optional. Job is a Presto job. - status (~.gcd_jobs.JobStatus): + status (google.cloud.dataproc_v1beta2.types.JobStatus): Output only. The job status. Additional application-specific status information may be contained in the type_job and yarn_applications fields. - status_history (Sequence[~.gcd_jobs.JobStatus]): + status_history (Sequence[google.cloud.dataproc_v1beta2.types.JobStatus]): Output only. The previous job status. - yarn_applications (Sequence[~.gcd_jobs.YarnApplication]): + yarn_applications (Sequence[google.cloud.dataproc_v1beta2.types.YarnApplication]): Output only. The collection of YARN applications spun up by this job. 
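The hunks above only retarget docstring cross-references, but together they document the ``Job`` message's oneof layout: exactly one of ``hadoop_job``, ``spark_job``, ``pyspark_job``, ``hive_job``, ``pig_job``, ``spark_r_job``, ``spark_sql_job``, or ``presto_job`` is set on a job. As a minimal sketch (not part of this diff) of how these v1beta2 types compose when submitting a job — the project, region, cluster name, and Cloud Storage URI below are placeholders:

```python
from google.cloud import dataproc_v1beta2

# Dataproc is regional; point the client at the region's endpoint.
client = dataproc_v1beta2.JobControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)

# Exactly one job-type field (here pyspark_job) may be set on a Job.
job = dataproc_v1beta2.Job(
    placement=dataproc_v1beta2.JobPlacement(cluster_name="example-cluster"),
    pyspark_job=dataproc_v1beta2.PySparkJob(
        main_python_file_uri="gs://example-bucket/wordcount.py"
    ),
)

# The returned Job carries the server-populated reference and status.
response = client.submit_job(
    project_id="example-project", region="us-central1", job=job
)
print(response.reference.job_id, response.status.state)
```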
@@ -725,7 +725,7 @@ class Job(proto.Message): control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as ``driver_output_uri``. - labels (Sequence[~.gcd_jobs.Job.LabelsEntry]): + labels (Sequence[google.cloud.dataproc_v1beta2.types.Job.LabelsEntry]): Optional. The labels to associate with this job. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. @@ -733,7 +733,7 @@ class Job(proto.Message): 1 to 63 characters, and must conform to `RFC 1035 `__. No more than 32 labels can be associated with a job. - scheduling (~.gcd_jobs.JobScheduling): + scheduling (google.cloud.dataproc_v1beta2.types.JobScheduling): Optional. Job scheduling configuration. job_uuid (str): Output only. A UUID that uniquely identifies a job within @@ -829,11 +829,11 @@ class JobMetadata(proto.Message): Attributes: job_id (str): Output only. The job id. - status (~.gcd_jobs.JobStatus): + status (google.cloud.dataproc_v1beta2.types.JobStatus): Output only. Most recent job status. operation_type (str): Output only. Operation type. - start_time (~.timestamp.Timestamp): + start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Job submission time. """ @@ -856,7 +856,7 @@ class SubmitJobRequest(proto.Message): region (str): Required. The Dataproc region in which to handle the request. - job (~.gcd_jobs.Job): + job (google.cloud.dataproc_v1beta2.types.Job): Required. The job resource. request_id (str): Optional. A unique id used to identify the request. If the @@ -927,7 +927,7 @@ class ListJobsRequest(proto.Message): Optional. If set, the returned jobs list includes only jobs that were submitted to the named cluster. - job_state_matcher (~.gcd_jobs.ListJobsRequest.JobStateMatcher): + job_state_matcher (google.cloud.dataproc_v1beta2.types.ListJobsRequest.JobStateMatcher): Optional. Specifies enumerated categories of jobs to list. (default = match ALL jobs). @@ -985,9 +985,9 @@ class UpdateJobRequest(proto.Message): handle the request. job_id (str): Required. The job ID. - job (~.gcd_jobs.Job): + job (google.cloud.dataproc_v1beta2.types.Job): Required. The changes to the job. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Specifies the path, relative to Job, of the field to update. For example, to update the labels of a Job the update_mask parameter would be specified as labels, and the @@ -1010,7 +1010,7 @@ class ListJobsResponse(proto.Message): r"""A list of jobs in a project. Attributes: - jobs (Sequence[~.gcd_jobs.Job]): + jobs (Sequence[google.cloud.dataproc_v1beta2.types.Job]): Output only. Jobs list. next_page_token (str): Optional. This token is included in the response if there diff --git a/google/cloud/dataproc_v1beta2/types/operations.py b/google/cloud/dataproc_v1beta2/types/operations.py index 13baf0df..469cc92f 100644 --- a/google/cloud/dataproc_v1beta2/types/operations.py +++ b/google/cloud/dataproc_v1beta2/types/operations.py @@ -31,7 +31,7 @@ class ClusterOperationStatus(proto.Message): r"""The status of the operation. Attributes: - state (~.operations.ClusterOperationStatus.State): + state (google.cloud.dataproc_v1beta2.types.ClusterOperationStatus.State): Output only. A message containing the operation state. inner_state (str): @@ -40,7 +40,7 @@ class ClusterOperationStatus(proto.Message): details (str): Output only. A message containing any operation metadata details. 
- state_start_time (~.timestamp.Timestamp): + state_start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time this state was entered. """ @@ -71,15 +71,15 @@ class ClusterOperationMetadata(proto.Message): operation. cluster_uuid (str): Output only. Cluster UUID for the operation. - status (~.operations.ClusterOperationStatus): + status (google.cloud.dataproc_v1beta2.types.ClusterOperationStatus): Output only. Current operation status. - status_history (Sequence[~.operations.ClusterOperationStatus]): + status_history (Sequence[google.cloud.dataproc_v1beta2.types.ClusterOperationStatus]): Output only. The previous operation status. operation_type (str): Output only. The operation type. description (str): Output only. Short description of operation. - labels (Sequence[~.operations.ClusterOperationMetadata.LabelsEntry]): + labels (Sequence[google.cloud.dataproc_v1beta2.types.ClusterOperationMetadata.LabelsEntry]): Output only. Labels associated with the operation warnings (Sequence[str]): diff --git a/google/cloud/dataproc_v1beta2/types/workflow_templates.py b/google/cloud/dataproc_v1beta2/types/workflow_templates.py index 22b8d11d..6704df9e 100644 --- a/google/cloud/dataproc_v1beta2/types/workflow_templates.py +++ b/google/cloud/dataproc_v1beta2/types/workflow_templates.py @@ -89,12 +89,12 @@ class WorkflowTemplate(proto.Message): ``version`` field filled in with the current server version. The user updates other fields in the template, then returns it as part of the ``UpdateWorkflowTemplate`` request. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time template was created. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time template was last updated. - labels (Sequence[~.workflow_templates.WorkflowTemplate.LabelsEntry]): + labels (Sequence[google.cloud.dataproc_v1beta2.types.WorkflowTemplate.LabelsEntry]): Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. @@ -108,18 +108,18 @@ class WorkflowTemplate(proto.Message): 1035 `__. No more than 32 labels can be associated with a template. - placement (~.workflow_templates.WorkflowTemplatePlacement): + placement (google.cloud.dataproc_v1beta2.types.WorkflowTemplatePlacement): Required. WorkflowTemplate scheduling information. - jobs (Sequence[~.workflow_templates.OrderedJob]): + jobs (Sequence[google.cloud.dataproc_v1beta2.types.OrderedJob]): Required. The Directed Acyclic Graph of Jobs to submit. - parameters (Sequence[~.workflow_templates.TemplateParameter]): + parameters (Sequence[google.cloud.dataproc_v1beta2.types.TemplateParameter]): Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated. - dag_timeout (~.duration.Duration): + dag_timeout (google.protobuf.duration_pb2.Duration): Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h", and "d" suffixes for second, minute, hour, and day duration values, respectively. The timeout duration @@ -163,10 +163,10 @@ class WorkflowTemplatePlacement(proto.Message): Either ``managed_cluster`` or ``cluster_selector`` is required. Attributes: - managed_cluster (~.workflow_templates.ManagedCluster): + managed_cluster (google.cloud.dataproc_v1beta2.types.ManagedCluster): Optional. A cluster that is managed by the workflow. 
- cluster_selector (~.workflow_templates.ClusterSelector): + cluster_selector (google.cloud.dataproc_v1beta2.types.ClusterSelector): Optional. A selector that chooses target cluster for jobs based on metadata. @@ -196,9 +196,9 @@ class ManagedCluster(proto.Message): begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters. - config (~.clusters.ClusterConfig): + config (google.cloud.dataproc_v1beta2.types.ClusterConfig): Required. The cluster configuration. - labels (Sequence[~.workflow_templates.ManagedCluster.LabelsEntry]): + labels (Sequence[google.cloud.dataproc_v1beta2.types.ManagedCluster.LabelsEntry]): Optional. The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and @@ -231,7 +231,7 @@ class ClusterSelector(proto.Message): selection of the cluster. If unspecified, the zone of the first cluster matching the selector is used. - cluster_labels (Sequence[~.workflow_templates.ClusterSelector.ClusterLabelsEntry]): + cluster_labels (Sequence[google.cloud.dataproc_v1beta2.types.ClusterSelector.ClusterLabelsEntry]): Required. The cluster labels. Cluster must have all labels to match. """ @@ -258,23 +258,23 @@ class OrderedJob(proto.Message): underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters. - hadoop_job (~.gcd_jobs.HadoopJob): + hadoop_job (google.cloud.dataproc_v1beta2.types.HadoopJob): Optional. Job is a Hadoop job. - spark_job (~.gcd_jobs.SparkJob): + spark_job (google.cloud.dataproc_v1beta2.types.SparkJob): Optional. Job is a Spark job. - pyspark_job (~.gcd_jobs.PySparkJob): + pyspark_job (google.cloud.dataproc_v1beta2.types.PySparkJob): Optional. Job is a PySpark job. - hive_job (~.gcd_jobs.HiveJob): + hive_job (google.cloud.dataproc_v1beta2.types.HiveJob): Optional. Job is a Hive job. - pig_job (~.gcd_jobs.PigJob): + pig_job (google.cloud.dataproc_v1beta2.types.PigJob): Optional. Job is a Pig job. - spark_r_job (~.gcd_jobs.SparkRJob): + spark_r_job (google.cloud.dataproc_v1beta2.types.SparkRJob): Optional. Job is a SparkR job. - spark_sql_job (~.gcd_jobs.SparkSqlJob): + spark_sql_job (google.cloud.dataproc_v1beta2.types.SparkSqlJob): Optional. Job is a SparkSql job. - presto_job (~.gcd_jobs.PrestoJob): + presto_job (google.cloud.dataproc_v1beta2.types.PrestoJob): Optional. Job is a Presto job. - labels (Sequence[~.workflow_templates.OrderedJob.LabelsEntry]): + labels (Sequence[google.cloud.dataproc_v1beta2.types.OrderedJob.LabelsEntry]): Optional. The labels to associate with this job. Label keys must be between 1 and 63 characters long, and @@ -286,7 +286,7 @@ class OrderedJob(proto.Message): [\p{Ll}\p{Lo}\p{N}_-]{0,63} No more than 32 labels can be associated with a given job. - scheduling (~.gcd_jobs.JobScheduling): + scheduling (google.cloud.dataproc_v1beta2.types.JobScheduling): Optional. Job scheduling configuration. prerequisite_step_ids (Sequence[str]): Optional. The optional list of prerequisite job step_ids. If @@ -409,7 +409,7 @@ class TemplateParameter(proto.Message): description (str): Optional. Brief description of the parameter. Must not exceed 1024 characters. - validation (~.workflow_templates.ParameterValidation): + validation (google.cloud.dataproc_v1beta2.types.ParameterValidation): Optional. Validation rules to be applied to this parameter's value. """ @@ -427,9 +427,9 @@ class ParameterValidation(proto.Message): r"""Configuration for parameter validation. 
Attributes: - regex (~.workflow_templates.RegexValidation): + regex (google.cloud.dataproc_v1beta2.types.RegexValidation): Validation based on regular expressions. - values (~.workflow_templates.ValueValidation): + values (google.cloud.dataproc_v1beta2.types.ValueValidation): Validation based on a list of allowed values. """ @@ -487,39 +487,39 @@ class WorkflowMetadata(proto.Message): version (int): Output only. The version of template at the time of workflow instantiation. - create_cluster (~.workflow_templates.ClusterOperation): + create_cluster (google.cloud.dataproc_v1beta2.types.ClusterOperation): Output only. The create cluster operation metadata. - graph (~.workflow_templates.WorkflowGraph): + graph (google.cloud.dataproc_v1beta2.types.WorkflowGraph): Output only. The workflow graph. - delete_cluster (~.workflow_templates.ClusterOperation): + delete_cluster (google.cloud.dataproc_v1beta2.types.ClusterOperation): Output only. The delete cluster operation metadata. - state (~.workflow_templates.WorkflowMetadata.State): + state (google.cloud.dataproc_v1beta2.types.WorkflowMetadata.State): Output only. The workflow state. cluster_name (str): Output only. The name of the target cluster. - parameters (Sequence[~.workflow_templates.WorkflowMetadata.ParametersEntry]): + parameters (Sequence[google.cloud.dataproc_v1beta2.types.WorkflowMetadata.ParametersEntry]): Map from parameter names to values that were used for those parameters. - start_time (~.timestamp.Timestamp): + start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Workflow start time. - end_time (~.timestamp.Timestamp): + end_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Workflow end time. cluster_uuid (str): Output only. The UUID of target cluster. - dag_timeout (~.duration.Duration): + dag_timeout (google.protobuf.duration_pb2.Duration): Output only. The timeout duration for the DAG of jobs. Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed as a [google.protobuf.Duration][https://developers.google.com/protocol-buffers/docs/proto3#json_mapping]. For example, "1800" = 1800 seconds/30 minutes duration. - dag_start_time (~.timestamp.Timestamp): + dag_start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. DAG start time, which is only set for workflows with [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout] when the DAG begins. - dag_end_time (~.timestamp.Timestamp): + dag_end_time (google.protobuf.timestamp_pb2.Timestamp): Output only. DAG end time, which is only set for workflows with [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout] @@ -585,7 +585,7 @@ class WorkflowGraph(proto.Message): r"""The workflow graph. Attributes: - nodes (Sequence[~.workflow_templates.WorkflowNode]): + nodes (Sequence[google.cloud.dataproc_v1beta2.types.WorkflowNode]): Output only. The workflow nodes. """ @@ -603,7 +603,7 @@ class WorkflowNode(proto.Message): job_id (str): Output only. The job id; populated after the node enters RUNNING state. - state (~.workflow_templates.WorkflowNode.NodeState): + state (google.cloud.dataproc_v1beta2.types.WorkflowNode.NodeState): Output only. The node state. error (str): Output only. The error detail. 
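These reference updates span the whole workflow-template surface, so for orientation: a ``WorkflowTemplate`` holds a ``WorkflowTemplatePlacement`` (either a ``ManagedCluster`` or a ``ClusterSelector``) plus a DAG of ``OrderedJob`` steps, and instantiating it yields a long-running operation whose metadata is ``WorkflowMetadata``. A hedged sketch using the v1beta2 types named above; every identifier and URI is a placeholder:

```python
from google.cloud import dataproc_v1beta2

# An inline template: an ephemeral managed cluster plus one PySpark step.
template = dataproc_v1beta2.WorkflowTemplate(
    id="example-workflow",
    placement=dataproc_v1beta2.WorkflowTemplatePlacement(
        managed_cluster=dataproc_v1beta2.ManagedCluster(
            cluster_name="ephemeral-cluster",
            config=dataproc_v1beta2.ClusterConfig(),
        )
    ),
    jobs=[
        dataproc_v1beta2.OrderedJob(
            step_id="count-words",
            pyspark_job=dataproc_v1beta2.PySparkJob(
                main_python_file_uri="gs://example-bucket/wordcount.py"
            ),
        )
    ],
)

client = dataproc_v1beta2.WorkflowTemplateServiceClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)

# Returns a long-running operation; its metadata is WorkflowMetadata.
operation = client.instantiate_inline_workflow_template(
    parent="projects/example-project/regions/us-central1", template=template
)
operation.result()  # blocks until the workflow's DAG completes
```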
@@ -645,7 +645,7 @@ class CreateWorkflowTemplateRequest(proto.Message): - For ``projects.locations.workflowTemplates.create``, the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` - template (~.workflow_templates.WorkflowTemplate): + template (google.cloud.dataproc_v1beta2.types.WorkflowTemplate): Required. The Dataproc workflow template to create. """ @@ -721,7 +721,7 @@ class InstantiateWorkflowTemplateRequest(proto.Message): The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). The maximum length is 40 characters. - parameters (Sequence[~.workflow_templates.InstantiateWorkflowTemplateRequest.ParametersEntry]): + parameters (Sequence[google.cloud.dataproc_v1beta2.types.InstantiateWorkflowTemplateRequest.ParametersEntry]): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 100 characters. @@ -756,7 +756,7 @@ class InstantiateInlineWorkflowTemplateRequest(proto.Message): ``projects.locations.workflowTemplates.instantiateinline``, the resource name of the location has the following format: ``projects/{project_id}/locations/{location}`` - template (~.workflow_templates.WorkflowTemplate): + template (google.cloud.dataproc_v1beta2.types.WorkflowTemplate): Required. The workflow template to instantiate. instance_id (str): @@ -787,7 +787,7 @@ class UpdateWorkflowTemplateRequest(proto.Message): r"""A request to update a workflow template. Attributes: - template (~.workflow_templates.WorkflowTemplate): + template (google.cloud.dataproc_v1beta2.types.WorkflowTemplate): Required. The updated workflow template. The ``template.version`` field must match the current @@ -834,7 +834,7 @@ class ListWorkflowTemplatesResponse(proto.Message): project. Attributes: - templates (Sequence[~.workflow_templates.WorkflowTemplate]): + templates (Sequence[google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Output only. WorkflowTemplates list. next_page_token (str): Output only. This token is included in the response if there diff --git a/noxfile.py b/noxfile.py index f230390f..c6bb3a90 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,22 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -70,17 +87,21 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("asyncmock", "pytest-asyncio") - session.install( - "mock", "pytest", "pytest-cov", + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install("-e", ".") + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) + + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. 
session.run( "py.test", "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", "--cov=google/cloud", "--cov=tests/unit", "--cov-append", @@ -101,6 +122,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -110,6 +134,9 @@ def system(session): # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) @@ -122,16 +149,26 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", - ) - session.install("-e", ".") + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -142,7 +179,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") + session.run("coverage", "report", "--show-missing", "--fail-under=98") session.run("coverage", "erase") @@ -174,9 +211,9 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. - # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install( + "sphinx<3.0.0", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/renovate.json b/renovate.json index 4fa94931..f08bc22c 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,6 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"] } diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index bca0522e..97bf7da8 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -85,7 +85,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] diff --git a/setup.py b/setup.py index e8c26f68..a7e7088f 100644 --- a/setup.py +++ b/setup.py @@ -29,11 +29,10 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", - "libcst >= 0.2.5", + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "proto-plus >= 1.4.0", ] -extras = {} +extras = {"libcst": "libcst >= 0.2.5"} # Setup boilerplate below this line. diff --git a/synth.metadata b/synth.metadata index cfb4b764..1698f565 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,30 +3,30 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-dataproc.git", - "sha": "2b4f513b09497e047435e679223a8db5d228d845" + "remote": "git@github.com:googleapis/python-dataproc.git", + "sha": "398cdb59ee69a26133a4d76680cf881f2906bbca" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "69697504d9eba1d064820c3085b4750767be6d08", - "internalRef": "348952930" + "sha": "8ff7d794576311d3d68d4df2ac6da93bbfcd7476", + "internalRef": "366472163" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8" + "sha": "ff39353f34a36e7643b86e97724e4027ab466dc6" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "41a4e56982620d3edcf110d76f4fcdfdec471ac8" + "sha": "ff39353f34a36e7643b86e97724e4027ab466dc6" } } ], @@ -49,189 +49,5 @@ "generator": "bazel" } } - ], - "generatedFiles": [ - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "docs/dataproc_v1/services.rst", - "docs/dataproc_v1/types.rst", - "docs/dataproc_v1beta2/services.rst", - 
"docs/dataproc_v1beta2/types.rst", - "docs/multiprocessing.rst", - "google/cloud/dataproc/__init__.py", - "google/cloud/dataproc/py.typed", - "google/cloud/dataproc_v1/__init__.py", - "google/cloud/dataproc_v1/proto/autoscaling_policies.proto", - "google/cloud/dataproc_v1/proto/clusters.proto", - "google/cloud/dataproc_v1/proto/jobs.proto", - "google/cloud/dataproc_v1/proto/operations.proto", - "google/cloud/dataproc_v1/proto/shared.proto", - "google/cloud/dataproc_v1/proto/workflow_templates.proto", - "google/cloud/dataproc_v1/py.typed", - "google/cloud/dataproc_v1/services/__init__.py", - "google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py", - "google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py", - "google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py", - "google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py", - "google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/__init__.py", - "google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py", - "google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py", - "google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py", - "google/cloud/dataproc_v1/services/cluster_controller/__init__.py", - "google/cloud/dataproc_v1/services/cluster_controller/async_client.py", - "google/cloud/dataproc_v1/services/cluster_controller/client.py", - "google/cloud/dataproc_v1/services/cluster_controller/pagers.py", - "google/cloud/dataproc_v1/services/cluster_controller/transports/__init__.py", - "google/cloud/dataproc_v1/services/cluster_controller/transports/base.py", - "google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py", - "google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py", - "google/cloud/dataproc_v1/services/job_controller/__init__.py", - "google/cloud/dataproc_v1/services/job_controller/async_client.py", - "google/cloud/dataproc_v1/services/job_controller/client.py", - "google/cloud/dataproc_v1/services/job_controller/pagers.py", - "google/cloud/dataproc_v1/services/job_controller/transports/__init__.py", - "google/cloud/dataproc_v1/services/job_controller/transports/base.py", - "google/cloud/dataproc_v1/services/job_controller/transports/grpc.py", - "google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py", - "google/cloud/dataproc_v1/services/workflow_template_service/__init__.py", - "google/cloud/dataproc_v1/services/workflow_template_service/async_client.py", - "google/cloud/dataproc_v1/services/workflow_template_service/client.py", - "google/cloud/dataproc_v1/services/workflow_template_service/pagers.py", - "google/cloud/dataproc_v1/services/workflow_template_service/transports/__init__.py", - "google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py", - "google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py", - "google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py", - "google/cloud/dataproc_v1/types/__init__.py", - "google/cloud/dataproc_v1/types/autoscaling_policies.py", - "google/cloud/dataproc_v1/types/clusters.py", - "google/cloud/dataproc_v1/types/jobs.py", - "google/cloud/dataproc_v1/types/operations.py", - "google/cloud/dataproc_v1/types/shared.py", - "google/cloud/dataproc_v1/types/workflow_templates.py", - "google/cloud/dataproc_v1beta2/__init__.py", - "google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto", - 
"google/cloud/dataproc_v1beta2/proto/clusters.proto", - "google/cloud/dataproc_v1beta2/proto/jobs.proto", - "google/cloud/dataproc_v1beta2/proto/operations.proto", - "google/cloud/dataproc_v1beta2/proto/shared.proto", - "google/cloud/dataproc_v1beta2/proto/workflow_templates.proto", - "google/cloud/dataproc_v1beta2/py.typed", - "google/cloud/dataproc_v1beta2/services/__init__.py", - "google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/__init__.py", - "google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/async_client.py", - "google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/client.py", - "google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/pagers.py", - "google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/__init__.py", - "google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/base.py", - "google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc.py", - "google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc_asyncio.py", - "google/cloud/dataproc_v1beta2/services/cluster_controller/__init__.py", - "google/cloud/dataproc_v1beta2/services/cluster_controller/async_client.py", - "google/cloud/dataproc_v1beta2/services/cluster_controller/client.py", - "google/cloud/dataproc_v1beta2/services/cluster_controller/pagers.py", - "google/cloud/dataproc_v1beta2/services/cluster_controller/transports/__init__.py", - "google/cloud/dataproc_v1beta2/services/cluster_controller/transports/base.py", - "google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc.py", - "google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc_asyncio.py", - "google/cloud/dataproc_v1beta2/services/job_controller/__init__.py", - "google/cloud/dataproc_v1beta2/services/job_controller/async_client.py", - "google/cloud/dataproc_v1beta2/services/job_controller/client.py", - "google/cloud/dataproc_v1beta2/services/job_controller/pagers.py", - "google/cloud/dataproc_v1beta2/services/job_controller/transports/__init__.py", - "google/cloud/dataproc_v1beta2/services/job_controller/transports/base.py", - "google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc.py", - "google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc_asyncio.py", - "google/cloud/dataproc_v1beta2/services/workflow_template_service/__init__.py", - "google/cloud/dataproc_v1beta2/services/workflow_template_service/async_client.py", - "google/cloud/dataproc_v1beta2/services/workflow_template_service/client.py", - "google/cloud/dataproc_v1beta2/services/workflow_template_service/pagers.py", - "google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/__init__.py", - "google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/base.py", - "google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc.py", - "google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc_asyncio.py", - "google/cloud/dataproc_v1beta2/types/__init__.py", - "google/cloud/dataproc_v1beta2/types/autoscaling_policies.py", - "google/cloud/dataproc_v1beta2/types/clusters.py", - "google/cloud/dataproc_v1beta2/types/jobs.py", - "google/cloud/dataproc_v1beta2/types/operations.py", - "google/cloud/dataproc_v1beta2/types/shared.py", - "google/cloud/dataproc_v1beta2/types/workflow_templates.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - 
"samples/snippets/noxfile.py", - "scripts/decrypt-secrets.sh", - "scripts/fixup_dataproc_v1_keywords.py", - "scripts/fixup_dataproc_v1beta2_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/dataproc_v1/__init__.py", - "tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py", - "tests/unit/gapic/dataproc_v1/test_cluster_controller.py", - "tests/unit/gapic/dataproc_v1/test_job_controller.py", - "tests/unit/gapic/dataproc_v1/test_workflow_template_service.py", - "tests/unit/gapic/dataproc_v1beta2/__init__.py", - "tests/unit/gapic/dataproc_v1beta2/test_autoscaling_policy_service.py", - "tests/unit/gapic/dataproc_v1beta2/test_cluster_controller.py", - "tests/unit/gapic/dataproc_v1beta2/test_job_controller.py", - "tests/unit/gapic/dataproc_v1beta2/test_workflow_template_service.py" ] } \ No newline at end of file diff --git a/synth.py b/synth.py index 62644eae..416f69d5 100644 --- a/synth.py +++ b/synth.py @@ -42,6 +42,7 @@ templated_files = common.py_library( samples=True, # set to True only if there are samples microgenerator=True, + cov_level=98, ) s.move(templated_files, excludes=[".coveragerc"]) # microgenerator has a good .coveragerc file @@ -57,4 +58,4 @@ # https://github.com/googleapis/gapic-generator-python/issues/525 s.replace("noxfile.py", '[\"\']-W[\"\']', '# "-W"') -s.shell.run(["nox", "-s", "blacken"], hide_output=False) \ No newline at end of file +s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt new file mode 100644 index 00000000..e69de29b diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt new file mode 100644 index 00000000..e69de29b diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt new file mode 100644 index 00000000..16e003fe --- /dev/null +++ b/testing/constraints-3.6.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. +# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +google-api-core==1.22.2 +libcst==0.2.5 +proto-plus==1.4.0 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt new file mode 100644 index 00000000..e69de29b diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt new file mode 100644 index 00000000..e69de29b diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/gapic/dataproc_v1/__init__.py b/tests/unit/gapic/dataproc_v1/__init__.py index 8b137891..42ffdf2b 100644 --- a/tests/unit/gapic/dataproc_v1/__init__.py +++ b/tests/unit/gapic/dataproc_v1/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py index 5fabfeb8..7516619f 100644 --- a/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py +++ b/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py @@ -92,7 +92,25 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( "client_class", - [AutoscalingPolicyServiceClient, AutoscalingPolicyServiceAsyncClient], + [AutoscalingPolicyServiceClient, AutoscalingPolicyServiceAsyncClient,], +) +def test_autoscaling_policy_service_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "dataproc.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", + [AutoscalingPolicyServiceClient, AutoscalingPolicyServiceAsyncClient,], ) def test_autoscaling_policy_service_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -102,16 +120,21 @@ def test_autoscaling_policy_service_client_from_service_account_file(client_clas factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "dataproc.googleapis.com:443" def test_autoscaling_policy_service_client_get_transport_class(): transport = AutoscalingPolicyServiceClient.get_transport_class() - assert transport == transports.AutoscalingPolicyServiceGrpcTransport + available_transports = [ + transports.AutoscalingPolicyServiceGrpcTransport, + ] + assert transport in available_transports transport = AutoscalingPolicyServiceClient.get_transport_class("grpc") assert transport == transports.AutoscalingPolicyServiceGrpcTransport @@ -170,7 +193,7 @@ def test_autoscaling_policy_service_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -186,7 +209,7 @@ def test_autoscaling_policy_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -202,7 +225,7 @@ def test_autoscaling_policy_service_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, 
client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -230,7 +253,7 @@ def test_autoscaling_policy_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -291,29 +314,25 @@ def test_autoscaling_policy_service_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
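
Aside: the rewritten expectations above and below replace a mocked ssl_channel_credentials object with the client_cert_source_for_mtls callback, gated on GOOGLE_API_USE_CLIENT_CERTIFICATE. A simplified, self-contained model of roughly the selection behavior these tests assert (pick_endpoint and client_cert_source are illustrative names, not part of this commit):

import os

def client_cert_source():
    # Illustrative callback: a real one returns (cert_bytes, key_bytes).
    return b"cert bytes", b"key bytes"

def pick_endpoint(default_endpoint, mtls_endpoint, cert_source):
    # The client cert and the mTLS endpoint are only used when the
    # env var is explicitly "true" and a cert source is available.
    if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" and cert_source:
        return mtls_endpoint, cert_source
    return default_endpoint, None

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
host, source = pick_endpoint(
    "dataproc.googleapis.com", "dataproc.mtls.googleapis.com", client_cert_source
)
assert host == "dataproc.mtls.googleapis.com"
assert source is client_cert_source
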
@@ -322,66 +341,53 @@ def test_autoscaling_policy_service_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -411,7 +417,7 @@ def test_autoscaling_policy_service_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -445,7 +451,7 @@ def test_autoscaling_policy_service_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -464,7 +470,7 @@ def test_autoscaling_policy_service_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -518,6 +524,24 @@ def test_create_autoscaling_policy_from_dict(): test_create_autoscaling_policy(request_type=dict) +def test_create_autoscaling_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_autoscaling_policy), "__call__" + ) as call: + client.create_autoscaling_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() + + @pytest.mark.asyncio async def test_create_autoscaling_policy_async( transport: str = "grpc_asyncio", @@ -761,6 +785,24 @@ def test_update_autoscaling_policy_from_dict(): test_update_autoscaling_policy(request_type=dict) +def test_update_autoscaling_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autoscaling_policy), "__call__" + ) as call: + client.update_autoscaling_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() + + @pytest.mark.asyncio async def test_update_autoscaling_policy_async( transport: str = "grpc_asyncio", @@ -996,6 +1038,24 @@ def test_get_autoscaling_policy_from_dict(): test_get_autoscaling_policy(request_type=dict) +def test_get_autoscaling_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autoscaling_policy), "__call__" + ) as call: + client.get_autoscaling_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() + + @pytest.mark.asyncio async def test_get_autoscaling_policy_async( transport: str = "grpc_asyncio", @@ -1217,6 +1277,24 @@ def test_list_autoscaling_policies_from_dict(): test_list_autoscaling_policies(request_type=dict) +def test_list_autoscaling_policies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_autoscaling_policies), "__call__" + ) as call: + client.list_autoscaling_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() + + @pytest.mark.asyncio async def test_list_autoscaling_policies_async( transport: str = "grpc_asyncio", @@ -1617,6 +1695,24 @@ def test_delete_autoscaling_policy_from_dict(): test_delete_autoscaling_policy(request_type=dict) +def test_delete_autoscaling_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_autoscaling_policy), "__call__" + ) as call: + client.delete_autoscaling_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() + + @pytest.mark.asyncio async def test_delete_autoscaling_policy_async( transport: str = "grpc_asyncio", @@ -1955,6 +2051,53 @@ def test_autoscaling_policy_service_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + ], +) +def test_autoscaling_policy_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_autoscaling_policy_service_host_no_port(): client = AutoscalingPolicyServiceClient( credentials=credentials.AnonymousCredentials(), @@ -1976,7 +2119,7 @@ def test_autoscaling_policy_service_host_with_port(): def test_autoscaling_policy_service_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.AutoscalingPolicyServiceGrpcTransport( @@ -1988,7 +2131,7 @@ def test_autoscaling_policy_service_grpc_transport_channel(): def test_autoscaling_policy_service_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.AutoscalingPolicyServiceGrpcAsyncIOTransport( @@ -1999,6 +2142,8 @@ def test_autoscaling_policy_service_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -2013,7 +2158,7 @@ def test_autoscaling_policy_service_transport_channel_mtls_with_client_cert_sour "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -2051,6 +2196,8 @@ def test_autoscaling_policy_service_transport_channel_mtls_with_client_cert_sour assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
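
The transport test above verifies that when no ready-made ssl_channel_credentials object is supplied, the (cert, key) pair produced by the client_cert_source_for_mtls callback is handed to grpc.ssl_channel_credentials. That conversion in isolation looks like the sketch below (the PEM bytes are placeholders, not a working certificate):

import grpc

def client_cert_source_callback():
    # Placeholder bytes; a real callback loads an actual client cert and key.
    return b"-----BEGIN CERTIFICATE-----...", b"-----BEGIN PRIVATE KEY-----..."

cert, key = client_cert_source_callback()
channel_creds = grpc.ssl_channel_credentials(
    certificate_chain=cert, private_key=key
)
# channel_creds would then back a grpc.secure_channel(...) to the mTLS
# endpoint, matching the assert_called_once_with in the test above.
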
@pytest.mark.parametrize( "transport_class", [ @@ -2066,7 +2213,7 @@ def test_autoscaling_policy_service_transport_channel_mtls_with_adc(transport_cl ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel diff --git a/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index 6d36f46b..e12e0109 100644 --- a/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -97,7 +97,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [ClusterControllerClient, ClusterControllerAsyncClient] + "client_class", [ClusterControllerClient, ClusterControllerAsyncClient,] +) +def test_cluster_controller_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "dataproc.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [ClusterControllerClient, ClusterControllerAsyncClient,] ) def test_cluster_controller_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -107,16 +124,21 @@ def test_cluster_controller_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "dataproc.googleapis.com:443" def test_cluster_controller_client_get_transport_class(): transport = ClusterControllerClient.get_transport_class() - assert transport == transports.ClusterControllerGrpcTransport + available_transports = [ + transports.ClusterControllerGrpcTransport, + ] + assert transport in available_transports transport = ClusterControllerClient.get_transport_class("grpc") assert transport == transports.ClusterControllerGrpcTransport @@ -167,7 +189,7 @@ def test_cluster_controller_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -183,7 +205,7 @@ def test_cluster_controller_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -199,7 +221,7 @@ def test_cluster_controller_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -227,7 +249,7 @@ def test_cluster_controller_client_client_options( credentials_file=None, 
host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -288,29 +310,25 @@ def test_cluster_controller_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. @@ -319,66 +337,53 @@ def test_cluster_controller_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -404,7 +409,7 @@ def test_cluster_controller_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -434,7 +439,7 @@ def test_cluster_controller_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -453,7 +458,7 @@ def test_cluster_controller_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -491,6 +496,22 @@ def test_create_cluster_from_dict(): test_create_cluster(request_type=dict) +def test_create_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + client.create_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == clusters.CreateClusterRequest() + + @pytest.mark.asyncio async def test_create_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.CreateClusterRequest @@ -652,6 +673,22 @@ def test_update_cluster_from_dict(): test_update_cluster(request_type=dict) +def test_update_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ClusterControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + client.update_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == clusters.UpdateClusterRequest() + + @pytest.mark.asyncio async def test_update_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.UpdateClusterRequest @@ -829,6 +866,22 @@ def test_delete_cluster_from_dict(): test_delete_cluster(request_type=dict) +def test_delete_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + client.delete_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == clusters.DeleteClusterRequest() + + @pytest.mark.asyncio async def test_delete_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.DeleteClusterRequest @@ -999,6 +1052,22 @@ def test_get_cluster_from_dict(): test_get_cluster(request_type=dict) +def test_get_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + client.get_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == clusters.GetClusterRequest() + + @pytest.mark.asyncio async def test_get_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.GetClusterRequest @@ -1173,6 +1242,22 @@ def test_list_clusters_from_dict(): test_list_clusters(request_type=dict) +def test_list_clusters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + client.list_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == clusters.ListClustersRequest() + + @pytest.mark.asyncio async def test_list_clusters_async( transport: str = "grpc_asyncio", request_type=clusters.ListClustersRequest @@ -1451,6 +1536,22 @@ def test_diagnose_cluster_from_dict(): test_diagnose_cluster(request_type=dict) +def test_diagnose_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: + client.diagnose_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == clusters.DiagnoseClusterRequest() + + @pytest.mark.asyncio async def test_diagnose_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.DiagnoseClusterRequest @@ -1749,6 +1850,51 @@ def test_cluster_controller_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + ], +) +def test_cluster_controller_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_cluster_controller_host_no_port(): client = ClusterControllerClient( credentials=credentials.AnonymousCredentials(), @@ -1770,7 +1916,7 @@ def test_cluster_controller_host_with_port(): def test_cluster_controller_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.ClusterControllerGrpcTransport( @@ -1782,7 +1928,7 @@ def test_cluster_controller_grpc_transport_channel(): def test_cluster_controller_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.ClusterControllerGrpcAsyncIOTransport( @@ -1793,6 +1939,8 @@ def test_cluster_controller_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
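
Every *_empty_call test added by this commit follows one shape; extracted from the diff, the pattern is (shown here for list_clusters, but any of the RPCs above works the same way):

from unittest import mock

from google.auth import credentials
from google.cloud.dataproc_v1.services.cluster_controller import ClusterControllerClient
from google.cloud.dataproc_v1.types import clusters

client = ClusterControllerClient(
    credentials=credentials.AnonymousCredentials(), transport="grpc",
)

# Mock the gRPC stub method, invoke the RPC with no request at all, and
# check that the client substituted the default (empty) request message.
with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
    client.list_clusters()
    call.assert_called()
    _, args, _ = call.mock_calls[0]
    assert args[0] == clusters.ListClustersRequest()
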
@pytest.mark.parametrize( "transport_class", [ @@ -1807,7 +1955,7 @@ def test_cluster_controller_transport_channel_mtls_with_client_cert_source( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -1845,6 +1993,8 @@ def test_cluster_controller_transport_channel_mtls_with_client_cert_source( assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -1860,7 +2010,7 @@ def test_cluster_controller_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel diff --git a/tests/unit/gapic/dataproc_v1/test_job_controller.py b/tests/unit/gapic/dataproc_v1/test_job_controller.py index 5529d6fa..8ba7c041 100644 --- a/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ b/tests/unit/gapic/dataproc_v1/test_job_controller.py @@ -91,7 +91,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [JobControllerClient, JobControllerAsyncClient] + "client_class", [JobControllerClient, JobControllerAsyncClient,] +) +def test_job_controller_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "dataproc.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [JobControllerClient, JobControllerAsyncClient,] ) def test_job_controller_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -101,16 +118,21 @@ def test_job_controller_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "dataproc.googleapis.com:443" def test_job_controller_client_get_transport_class(): transport = JobControllerClient.get_transport_class() - assert transport == transports.JobControllerGrpcTransport + available_transports = [ + transports.JobControllerGrpcTransport, + ] + assert transport in available_transports transport = JobControllerClient.get_transport_class("grpc") assert transport == transports.JobControllerGrpcTransport @@ -161,7 +183,7 @@ def test_job_controller_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -177,7 
+199,7 @@ def test_job_controller_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -193,7 +215,7 @@ def test_job_controller_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -221,7 +243,7 @@ def test_job_controller_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -272,29 +294,25 @@ def test_job_controller_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
@@ -303,66 +321,53 @@ def test_job_controller_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -388,7 +393,7 @@ def test_job_controller_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -418,7 +423,7 @@ def test_job_controller_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -437,7 +442,7 @@ def test_job_controller_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -488,6 +493,22 @@ def test_submit_job_from_dict(): test_submit_job(request_type=dict) +def test_submit_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + client.submit_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.SubmitJobRequest() + + @pytest.mark.asyncio async def test_submit_job_async( transport: str = "grpc_asyncio", request_type=jobs.SubmitJobRequest @@ -662,6 +683,24 @@ def test_submit_job_as_operation_from_dict(): test_submit_job_as_operation(request_type=dict) +def test_submit_job_as_operation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + client.submit_job_as_operation() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.SubmitJobRequest() + + @pytest.mark.asyncio async def test_submit_job_as_operation_async( transport: str = "grpc_asyncio", request_type=jobs.SubmitJobRequest @@ -842,6 +881,22 @@ def test_get_job_from_dict(): test_get_job(request_type=dict) +def test_get_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
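
Alongside the empty-call tests, each service also gains a from_service_account_info test (see the top of this file's diff). Stripped of parametrization, that pattern reduces to roughly the following sketch (the info dict is a stand-in for parsed service-account JSON, exactly as in the tests themselves):

from unittest import mock

from google.auth import credentials
from google.cloud.dataproc_v1.services.job_controller import JobControllerClient
from google.oauth2 import service_account

creds = credentials.AnonymousCredentials()
with mock.patch.object(
    service_account.Credentials, "from_service_account_info", return_value=creds
):
    # Real code would pass actual service-account key material here.
    client = JobControllerClient.from_service_account_info({"valid": True})

assert client.transport._credentials is creds
assert client.transport._host == "dataproc.googleapis.com:443"
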
+ client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + client.get_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.GetJobRequest() + + @pytest.mark.asyncio async def test_get_job_async( transport: str = "grpc_asyncio", request_type=jobs.GetJobRequest @@ -1009,6 +1064,22 @@ def test_list_jobs_from_dict(): test_list_jobs(request_type=dict) +def test_list_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + client.list_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.ListJobsRequest() + + @pytest.mark.asyncio async def test_list_jobs_async( transport: str = "grpc_asyncio", request_type=jobs.ListJobsRequest @@ -1276,6 +1347,22 @@ def test_update_job_from_dict(): test_update_job(request_type=dict) +def test_update_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + client.update_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.UpdateJobRequest() + + @pytest.mark.asyncio async def test_update_job_async( transport: str = "grpc_asyncio", request_type=jobs.UpdateJobRequest @@ -1370,6 +1457,22 @@ def test_cancel_job_from_dict(): test_cancel_job(request_type=dict) +def test_cancel_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + client.cancel_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.CancelJobRequest() + + @pytest.mark.asyncio async def test_cancel_job_async( transport: str = "grpc_asyncio", request_type=jobs.CancelJobRequest @@ -1532,6 +1635,22 @@ def test_delete_job_from_dict(): test_delete_job(request_type=dict) +def test_delete_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + client.delete_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.DeleteJobRequest() + + @pytest.mark.asyncio async def test_delete_job_async( transport: str = "grpc_asyncio", request_type=jobs.DeleteJobRequest @@ -1819,6 +1938,51 @@ def test_job_controller_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.JobControllerGrpcTransport, + transports.JobControllerGrpcAsyncIOTransport, + ], +) +def test_job_controller_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_job_controller_host_no_port(): client = JobControllerClient( credentials=credentials.AnonymousCredentials(), @@ -1840,7 +2004,7 @@ def test_job_controller_host_with_port(): def test_job_controller_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.JobControllerGrpcTransport( @@ -1852,7 +2016,7 @@ def test_job_controller_grpc_transport_channel(): def test_job_controller_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.JobControllerGrpcAsyncIOTransport( @@ -1863,6 +2027,8 @@ def test_job_controller_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
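
The two channel tests just above swap grpc.insecure_channel for grpc.secure_channel with local channel credentials, which yields a credentialed channel object without any real TLS handshake. The construction in isolation (the target string is copied from the tests; the channel never actually connects):

import grpc

# A "secure" channel backed by local credentials, suitable for handing to a
# transport in tests; no certificates are involved.
channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# It is then passed straight to the transport, as in the tests above, e.g.:
#   transports.JobControllerGrpcTransport(channel=channel)
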
@pytest.mark.parametrize( "transport_class", [ @@ -1875,7 +2041,7 @@ def test_job_controller_transport_channel_mtls_with_client_cert_source(transport "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -1913,6 +2079,8 @@ def test_job_controller_transport_channel_mtls_with_client_cert_source(transport assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -1928,7 +2096,7 @@ def test_job_controller_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel diff --git a/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py index 2f171a18..379887a9 100644 --- a/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ b/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -99,7 +99,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [WorkflowTemplateServiceClient, WorkflowTemplateServiceAsyncClient] + "client_class", [WorkflowTemplateServiceClient, WorkflowTemplateServiceAsyncClient,] +) +def test_workflow_template_service_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "dataproc.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [WorkflowTemplateServiceClient, WorkflowTemplateServiceAsyncClient,] ) def test_workflow_template_service_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -109,16 +126,21 @@ def test_workflow_template_service_client_from_service_account_file(client_class factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "dataproc.googleapis.com:443" def test_workflow_template_service_client_get_transport_class(): transport = WorkflowTemplateServiceClient.get_transport_class() - assert transport == transports.WorkflowTemplateServiceGrpcTransport + available_transports = [ + transports.WorkflowTemplateServiceGrpcTransport, + ] + assert transport in available_transports transport = WorkflowTemplateServiceClient.get_transport_class("grpc") assert transport == transports.WorkflowTemplateServiceGrpcTransport @@ -173,7 +195,7 @@ def test_workflow_template_service_client_client_options( 
credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -189,7 +211,7 @@ def test_workflow_template_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -205,7 +227,7 @@ def test_workflow_template_service_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -233,7 +255,7 @@ def test_workflow_template_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -294,29 +316,25 @@ def test_workflow_template_service_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
@@ -325,66 +343,53 @@ def test_workflow_template_service_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -414,7 +419,7 @@ def test_workflow_template_service_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -448,7 +453,7 @@ def test_workflow_template_service_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -467,7 +472,7 @@ def test_workflow_template_service_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -517,6 +522,24 @@ def test_create_workflow_template_from_dict(): test_create_workflow_template(request_type=dict) +def test_create_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workflow_template), "__call__" + ) as call: + client.create_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() + + @pytest.mark.asyncio async def test_create_workflow_template_async( transport: str = "grpc_asyncio", @@ -759,6 +782,24 @@ def test_get_workflow_template_from_dict(): test_get_workflow_template(request_type=dict) +def test_get_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workflow_template), "__call__" + ) as call: + client.get_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.GetWorkflowTemplateRequest() + + @pytest.mark.asyncio async def test_get_workflow_template_async( transport: str = "grpc_asyncio", @@ -979,6 +1020,24 @@ def test_instantiate_workflow_template_from_dict(): test_instantiate_workflow_template(request_type=dict) +def test_instantiate_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_workflow_template), "__call__" + ) as call: + client.instantiate_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() + + @pytest.mark.asyncio async def test_instantiate_workflow_template_async( transport: str = "grpc_asyncio", @@ -1203,6 +1262,24 @@ def test_instantiate_inline_workflow_template_from_dict(): test_instantiate_inline_workflow_template(request_type=dict) +def test_instantiate_inline_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_inline_workflow_template), "__call__" + ) as call: + client.instantiate_inline_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() + + @pytest.mark.asyncio async def test_instantiate_inline_workflow_template_async( transport: str = "grpc_asyncio", @@ -1438,6 +1515,24 @@ def test_update_workflow_template_from_dict(): test_update_workflow_template(request_type=dict) +def test_update_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workflow_template), "__call__" + ) as call: + client.update_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() + + @pytest.mark.asyncio async def test_update_workflow_template_async( transport: str = "grpc_asyncio", @@ -1673,6 +1768,24 @@ def test_list_workflow_templates_from_dict(): test_list_workflow_templates(request_type=dict) +def test_list_workflow_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workflow_templates), "__call__" + ) as call: + client.list_workflow_templates() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.ListWorkflowTemplatesRequest() + + @pytest.mark.asyncio async def test_list_workflow_templates_async( transport: str = "grpc_asyncio", @@ -2069,6 +2182,24 @@ def test_delete_workflow_template_from_dict(): test_delete_workflow_template(request_type=dict) +def test_delete_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workflow_template), "__call__" + ) as call: + client.delete_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest() + + @pytest.mark.asyncio async def test_delete_workflow_template_async( transport: str = "grpc_asyncio", @@ -2414,6 +2545,53 @@ def test_workflow_template_service_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + ], +) +def test_workflow_template_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_workflow_template_service_host_no_port(): client = WorkflowTemplateServiceClient( credentials=credentials.AnonymousCredentials(), @@ -2435,7 +2613,7 @@ def test_workflow_template_service_host_with_port(): def test_workflow_template_service_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.WorkflowTemplateServiceGrpcTransport( @@ -2447,7 +2625,7 @@ def test_workflow_template_service_grpc_transport_channel(): def test_workflow_template_service_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.WorkflowTemplateServiceGrpcAsyncIOTransport( @@ -2458,6 +2636,8 @@ def test_workflow_template_service_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -2472,7 +2652,7 @@ def test_workflow_template_service_transport_channel_mtls_with_client_cert_sourc "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -2510,6 +2690,8 @@ def test_workflow_template_service_transport_channel_mtls_with_client_cert_sourc assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -2525,7 +2707,7 @@ def test_workflow_template_service_transport_channel_mtls_with_adc(transport_cla ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel diff --git a/tests/unit/gapic/dataproc_v1beta2/__init__.py b/tests/unit/gapic/dataproc_v1beta2/__init__.py index 8b137891..42ffdf2b 100644 --- a/tests/unit/gapic/dataproc_v1beta2/__init__.py +++ b/tests/unit/gapic/dataproc_v1beta2/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
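The transport-channel tests in the hunks above switch their pre-built dummy channels from grpc.insecure_channel to grpc.secure_channel with grpc.local_channel_credentials(); the "http://localhost/" target is only a placeholder, since the channel is handed to the transport constructor and never connected. The standalone pattern, with an illustrative target (a sketch, not project code):

import grpc

# Local channel credentials authenticate loopback/UDS connections without TLS
# certificates, which is enough to construct a "secure" channel object for a
# unit test that never dials out.
channel = grpc.secure_channel("localhost:8080", grpc.local_channel_credentials())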
+# diff --git a/tests/unit/gapic/dataproc_v1beta2/test_autoscaling_policy_service.py b/tests/unit/gapic/dataproc_v1beta2/test_autoscaling_policy_service.py index 1f24d213..9af5109f 100644 --- a/tests/unit/gapic/dataproc_v1beta2/test_autoscaling_policy_service.py +++ b/tests/unit/gapic/dataproc_v1beta2/test_autoscaling_policy_service.py @@ -92,7 +92,25 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( "client_class", - [AutoscalingPolicyServiceClient, AutoscalingPolicyServiceAsyncClient], + [AutoscalingPolicyServiceClient, AutoscalingPolicyServiceAsyncClient,], +) +def test_autoscaling_policy_service_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "dataproc.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", + [AutoscalingPolicyServiceClient, AutoscalingPolicyServiceAsyncClient,], ) def test_autoscaling_policy_service_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -102,16 +120,21 @@ def test_autoscaling_policy_service_client_from_service_account_file(client_clas factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "dataproc.googleapis.com:443" def test_autoscaling_policy_service_client_get_transport_class(): transport = AutoscalingPolicyServiceClient.get_transport_class() - assert transport == transports.AutoscalingPolicyServiceGrpcTransport + available_transports = [ + transports.AutoscalingPolicyServiceGrpcTransport, + ] + assert transport in available_transports transport = AutoscalingPolicyServiceClient.get_transport_class("grpc") assert transport == transports.AutoscalingPolicyServiceGrpcTransport @@ -170,7 +193,7 @@ def test_autoscaling_policy_service_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -186,7 +209,7 @@ def test_autoscaling_policy_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -202,7 +225,7 @@ def test_autoscaling_policy_service_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -230,7 +253,7 @@ def test_autoscaling_policy_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -291,29 +314,25 @@ def 
test_autoscaling_policy_service_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. @@ -322,66 +341,53 @@ def test_autoscaling_policy_service_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -411,7 +417,7 @@ def test_autoscaling_policy_service_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -445,7 +451,7 @@ def test_autoscaling_policy_service_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -464,7 +470,7 @@ def test_autoscaling_policy_service_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -518,6 +524,24 @@ def test_create_autoscaling_policy_from_dict(): test_create_autoscaling_policy(request_type=dict) +def test_create_autoscaling_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_autoscaling_policy), "__call__" + ) as call: + client.create_autoscaling_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() + + @pytest.mark.asyncio async def test_create_autoscaling_policy_async( transport: str = "grpc_asyncio", @@ -761,6 +785,24 @@ def test_update_autoscaling_policy_from_dict(): test_update_autoscaling_policy(request_type=dict) +def test_update_autoscaling_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autoscaling_policy), "__call__" + ) as call: + client.update_autoscaling_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() + + @pytest.mark.asyncio async def test_update_autoscaling_policy_async( transport: str = "grpc_asyncio", @@ -996,6 +1038,24 @@ def test_get_autoscaling_policy_from_dict(): test_get_autoscaling_policy(request_type=dict) +def test_get_autoscaling_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autoscaling_policy), "__call__" + ) as call: + client.get_autoscaling_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() + + @pytest.mark.asyncio async def test_get_autoscaling_policy_async( transport: str = "grpc_asyncio", @@ -1217,6 +1277,24 @@ def test_list_autoscaling_policies_from_dict(): test_list_autoscaling_policies(request_type=dict) +def test_list_autoscaling_policies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_autoscaling_policies), "__call__" + ) as call: + client.list_autoscaling_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() + + @pytest.mark.asyncio async def test_list_autoscaling_policies_async( transport: str = "grpc_asyncio", @@ -1617,6 +1695,24 @@ def test_delete_autoscaling_policy_from_dict(): test_delete_autoscaling_policy(request_type=dict) +def test_delete_autoscaling_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_autoscaling_policy), "__call__" + ) as call: + client.delete_autoscaling_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() + + @pytest.mark.asyncio async def test_delete_autoscaling_policy_async( transport: str = "grpc_asyncio", @@ -1955,6 +2051,53 @@ def test_autoscaling_policy_service_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + ], +) +def test_autoscaling_policy_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_autoscaling_policy_service_host_no_port(): client = AutoscalingPolicyServiceClient( credentials=credentials.AnonymousCredentials(), @@ -1976,7 +2119,7 @@ def test_autoscaling_policy_service_host_with_port(): def test_autoscaling_policy_service_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.AutoscalingPolicyServiceGrpcTransport( @@ -1988,7 +2131,7 @@ def test_autoscaling_policy_service_grpc_transport_channel(): def test_autoscaling_policy_service_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.AutoscalingPolicyServiceGrpcAsyncIOTransport( @@ -1999,6 +2142,8 @@ def test_autoscaling_policy_service_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -2013,7 +2158,7 @@ def test_autoscaling_policy_service_transport_channel_mtls_with_client_cert_sour "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -2051,6 +2196,8 @@ def test_autoscaling_policy_service_transport_channel_mtls_with_client_cert_sour assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -2066,7 +2213,7 @@ def test_autoscaling_policy_service_transport_channel_mtls_with_adc(transport_cl ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel diff --git a/tests/unit/gapic/dataproc_v1beta2/test_cluster_controller.py b/tests/unit/gapic/dataproc_v1beta2/test_cluster_controller.py index 863598c8..91f7f5a7 100644 --- a/tests/unit/gapic/dataproc_v1beta2/test_cluster_controller.py +++ b/tests/unit/gapic/dataproc_v1beta2/test_cluster_controller.py @@ -99,7 +99,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [ClusterControllerClient, ClusterControllerAsyncClient] + "client_class", [ClusterControllerClient, ClusterControllerAsyncClient,] +) +def test_cluster_controller_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "dataproc.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [ClusterControllerClient, ClusterControllerAsyncClient,] ) def test_cluster_controller_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -109,16 +126,21 @@ def test_cluster_controller_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "dataproc.googleapis.com:443" def test_cluster_controller_client_get_transport_class(): transport = ClusterControllerClient.get_transport_class() - assert transport == transports.ClusterControllerGrpcTransport + available_transports = [ + transports.ClusterControllerGrpcTransport, + ] + assert transport in available_transports transport = ClusterControllerClient.get_transport_class("grpc") assert transport == transports.ClusterControllerGrpcTransport @@ -169,7 +191,7 @@ def test_cluster_controller_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + 
client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -185,7 +207,7 @@ def test_cluster_controller_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -201,7 +223,7 @@ def test_cluster_controller_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -229,7 +251,7 @@ def test_cluster_controller_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -290,29 +312,25 @@ def test_cluster_controller_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
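The mtls_env_auto hunks, here and in the other test modules, stop mocking google.auth.transport.grpc.SslCredentials and instead mock google.auth.transport.mtls.has_default_client_cert_source / default_client_cert_source, asserting which endpoint and cert source the client hands its transport. A hedged reconstruction of the decision these tests pin down (function and parameter names are illustrative, not the generated client's actual code):

import os
from google.auth.transport import mtls

def resolve_endpoint_and_cert_source(
    default_endpoint, mtls_endpoint, client_cert_source=None
):
    # GOOGLE_API_USE_CLIENT_CERTIFICATE values other than "true" disable
    # client certificates entirely.
    if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") != "true":
        return default_endpoint, None
    # Fall back to the ADC default cert source when none was passed explicitly.
    if client_cert_source is None and mtls.has_default_client_cert_source():
        client_cert_source = mtls.default_client_cert_source()
    if client_cert_source is not None:
        return mtls_endpoint, client_cert_source
    # Env var set to "true" but no cert source available anywhere.
    return default_endpoint, None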
@@ -321,66 +339,53 @@ def test_cluster_controller_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -406,7 +411,7 @@ def test_cluster_controller_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -436,7 +441,7 @@ def test_cluster_controller_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -455,7 +460,7 @@ def test_cluster_controller_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -493,6 +498,22 @@ def test_create_cluster_from_dict(): test_create_cluster(request_type=dict) +def test_create_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + client.create_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == clusters.CreateClusterRequest() + + @pytest.mark.asyncio async def test_create_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.CreateClusterRequest @@ -654,6 +675,22 @@ def test_update_cluster_from_dict(): test_update_cluster(request_type=dict) +def test_update_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + client.update_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == clusters.UpdateClusterRequest() + + @pytest.mark.asyncio async def test_update_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.UpdateClusterRequest @@ -831,6 +868,22 @@ def test_delete_cluster_from_dict(): test_delete_cluster(request_type=dict) +def test_delete_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ClusterControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + client.delete_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == clusters.DeleteClusterRequest() + + @pytest.mark.asyncio async def test_delete_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.DeleteClusterRequest @@ -1001,6 +1054,22 @@ def test_get_cluster_from_dict(): test_get_cluster(request_type=dict) +def test_get_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + client.get_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == clusters.GetClusterRequest() + + @pytest.mark.asyncio async def test_get_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.GetClusterRequest @@ -1175,6 +1244,22 @@ def test_list_clusters_from_dict(): test_list_clusters(request_type=dict) +def test_list_clusters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + client.list_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == clusters.ListClustersRequest() + + @pytest.mark.asyncio async def test_list_clusters_async( transport: str = "grpc_asyncio", request_type=clusters.ListClustersRequest @@ -1453,6 +1538,22 @@ def test_diagnose_cluster_from_dict(): test_diagnose_cluster(request_type=dict) +def test_diagnose_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: + client.diagnose_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == clusters.DiagnoseClusterRequest() + + @pytest.mark.asyncio async def test_diagnose_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.DiagnoseClusterRequest @@ -1751,6 +1852,51 @@ def test_cluster_controller_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + ], +) +def test_cluster_controller_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_cluster_controller_host_no_port(): client = ClusterControllerClient( credentials=credentials.AnonymousCredentials(), @@ -1772,7 +1918,7 @@ def test_cluster_controller_host_with_port(): def test_cluster_controller_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.ClusterControllerGrpcTransport( @@ -1784,7 +1930,7 @@ def test_cluster_controller_grpc_transport_channel(): def test_cluster_controller_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.ClusterControllerGrpcAsyncIOTransport( @@ -1795,6 +1941,8 @@ def test_cluster_controller_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -1809,7 +1957,7 @@ def test_cluster_controller_transport_channel_mtls_with_client_cert_source( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -1847,6 +1995,8 @@ def test_cluster_controller_transport_channel_mtls_with_client_cert_source( assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -1862,7 +2012,7 @@ def test_cluster_controller_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel diff --git a/tests/unit/gapic/dataproc_v1beta2/test_job_controller.py b/tests/unit/gapic/dataproc_v1beta2/test_job_controller.py index d9b0e661..fa894d54 100644 --- a/tests/unit/gapic/dataproc_v1beta2/test_job_controller.py +++ b/tests/unit/gapic/dataproc_v1beta2/test_job_controller.py @@ -93,7 +93,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [JobControllerClient, JobControllerAsyncClient] + "client_class", [JobControllerClient, JobControllerAsyncClient,] +) +def test_job_controller_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "dataproc.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [JobControllerClient, JobControllerAsyncClient,] ) def test_job_controller_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -103,16 +120,21 @@ def test_job_controller_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "dataproc.googleapis.com:443" def test_job_controller_client_get_transport_class(): transport = JobControllerClient.get_transport_class() - assert transport == transports.JobControllerGrpcTransport + available_transports = [ + transports.JobControllerGrpcTransport, + ] + assert transport in available_transports transport = JobControllerClient.get_transport_class("grpc") assert transport == transports.JobControllerGrpcTransport @@ -163,7 +185,7 @@ def test_job_controller_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -179,7 +201,7 @@ def test_job_controller_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -195,7 +217,7 @@ def test_job_controller_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -223,7 +245,7 @@ def test_job_controller_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + 
client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -274,29 +296,25 @@ def test_job_controller_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. @@ -305,66 +323,53 @@ def test_job_controller_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -390,7 +395,7 @@ def test_job_controller_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -420,7 +425,7 @@ def test_job_controller_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -439,7 +444,7 @@ def test_job_controller_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -493,6 +498,22 @@ def test_submit_job_from_dict(): test_submit_job(request_type=dict) +def test_submit_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + client.submit_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.SubmitJobRequest() + + @pytest.mark.asyncio async def test_submit_job_async( transport: str = "grpc_asyncio", request_type=jobs.SubmitJobRequest @@ -670,6 +691,24 @@ def test_submit_job_as_operation_from_dict(): test_submit_job_as_operation(request_type=dict) +def test_submit_job_as_operation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + client.submit_job_as_operation() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.SubmitJobRequest() + + @pytest.mark.asyncio async def test_submit_job_as_operation_async( transport: str = "grpc_asyncio", request_type=jobs.SubmitJobRequest @@ -853,6 +892,22 @@ def test_get_job_from_dict(): test_get_job(request_type=dict) +def test_get_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + client.get_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.GetJobRequest() + + @pytest.mark.asyncio async def test_get_job_async( transport: str = "grpc_asyncio", request_type=jobs.GetJobRequest @@ -1023,6 +1078,22 @@ def test_list_jobs_from_dict(): test_list_jobs(request_type=dict) +def test_list_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + client.list_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.ListJobsRequest() + + @pytest.mark.asyncio async def test_list_jobs_async( transport: str = "grpc_asyncio", request_type=jobs.ListJobsRequest @@ -1293,6 +1364,22 @@ def test_update_job_from_dict(): test_update_job(request_type=dict) +def test_update_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + client.update_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.UpdateJobRequest() + + @pytest.mark.asyncio async def test_update_job_async( transport: str = "grpc_asyncio", request_type=jobs.UpdateJobRequest @@ -1393,6 +1480,22 @@ def test_cancel_job_from_dict(): test_cancel_job(request_type=dict) +def test_cancel_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + client.cancel_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.CancelJobRequest() + + @pytest.mark.asyncio async def test_cancel_job_async( transport: str = "grpc_asyncio", request_type=jobs.CancelJobRequest @@ -1558,6 +1661,22 @@ def test_delete_job_from_dict(): test_delete_job(request_type=dict) +def test_delete_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + client.delete_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == jobs.DeleteJobRequest() + + @pytest.mark.asyncio async def test_delete_job_async( transport: str = "grpc_asyncio", request_type=jobs.DeleteJobRequest @@ -1845,6 +1964,51 @@ def test_job_controller_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.JobControllerGrpcTransport, + transports.JobControllerGrpcAsyncIOTransport, + ], +) +def test_job_controller_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_job_controller_host_no_port(): client = JobControllerClient( credentials=credentials.AnonymousCredentials(), @@ -1866,7 +2030,7 @@ def test_job_controller_host_with_port(): def test_job_controller_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.JobControllerGrpcTransport( @@ -1878,7 +2042,7 @@ def test_job_controller_grpc_transport_channel(): def test_job_controller_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. 
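
The new *_grpc_transport_client_cert_source_for_mtls tests pin down a precedence rule: an explicit ssl_channel_credentials argument wins, and only in its absence is the client_cert_source_for_mtls callback consulted. Roughly, under the assumptions those tests encode (a hypothetical helper, not the transport's actual implementation):

    import grpc

    def resolve_channel_credentials(ssl_channel_credentials, client_cert_source_for_mtls):
        # Explicit SSL channel credentials take precedence.
        if ssl_channel_credentials is not None:
            return ssl_channel_credentials
        # Otherwise the callback supplies the client certificate chain and
        # private key, from which SSL channel credentials are built.
        if client_cert_source_for_mtls is not None:
            cert, key = client_cert_source_for_mtls()
            return grpc.ssl_channel_credentials(
                certificate_chain=cert, private_key=key
            )
        return None
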
transport = transports.JobControllerGrpcAsyncIOTransport( @@ -1889,6 +2053,8 @@ def test_job_controller_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -1901,7 +2067,7 @@ def test_job_controller_transport_channel_mtls_with_client_cert_source(transport "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -1939,6 +2105,8 @@ def test_job_controller_transport_channel_mtls_with_client_cert_source(transport assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -1954,7 +2122,7 @@ def test_job_controller_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel diff --git a/tests/unit/gapic/dataproc_v1beta2/test_workflow_template_service.py b/tests/unit/gapic/dataproc_v1beta2/test_workflow_template_service.py index 00e5a115..4d466aa5 100644 --- a/tests/unit/gapic/dataproc_v1beta2/test_workflow_template_service.py +++ b/tests/unit/gapic/dataproc_v1beta2/test_workflow_template_service.py @@ -99,7 +99,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [WorkflowTemplateServiceClient, WorkflowTemplateServiceAsyncClient] + "client_class", [WorkflowTemplateServiceClient, WorkflowTemplateServiceAsyncClient,] +) +def test_workflow_template_service_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "dataproc.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [WorkflowTemplateServiceClient, WorkflowTemplateServiceAsyncClient,] ) def test_workflow_template_service_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -109,16 +126,21 @@ def test_workflow_template_service_client_from_service_account_file(client_class factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "dataproc.googleapis.com:443" def test_workflow_template_service_client_get_transport_class(): transport = WorkflowTemplateServiceClient.get_transport_class() - assert transport == 
transports.WorkflowTemplateServiceGrpcTransport + available_transports = [ + transports.WorkflowTemplateServiceGrpcTransport, + ] + assert transport in available_transports transport = WorkflowTemplateServiceClient.get_transport_class("grpc") assert transport == transports.WorkflowTemplateServiceGrpcTransport @@ -173,7 +195,7 @@ def test_workflow_template_service_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -189,7 +211,7 @@ def test_workflow_template_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -205,7 +227,7 @@ def test_workflow_template_service_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -233,7 +255,7 @@ def test_workflow_template_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -294,29 +316,25 @@ def test_workflow_template_service_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
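
Alongside the existing from_service_account_file tests, the diff adds coverage for from_service_account_info, which builds a client from an in-memory dict instead of a key file. A hedged usage sketch of the same pattern the test uses, with the credentials factory mocked so no real service-account key material is needed:

    from unittest import mock

    from google.auth import credentials
    from google.oauth2 import service_account

    def make_client_from_info(client_class, info):
        # The real factory parses and validates the service-account payload,
        # so this sketch substitutes anonymous credentials, exactly as the
        # test above does.
        with mock.patch.object(
            service_account.Credentials, "from_service_account_info"
        ) as factory:
            factory.return_value = credentials.AnonymousCredentials()
            return client_class.from_service_account_info(info)
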
@@ -325,66 +343,53 @@ def test_workflow_template_service_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
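
Each branch of these mtls_env_auto tests wraps its assertions in mock.patch.dict to flip GOOGLE_API_USE_CLIENT_CERTIFICATE per parametrization. A self-contained illustration of that mechanism:

    import os
    from unittest import mock

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        # The override is visible only inside the with-block.
        assert os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] == "true"
    # On exit the original environment is restored; the key is removed again
    # if it was previously unset.
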
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -414,7 +419,7 @@ def test_workflow_template_service_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -448,7 +453,7 @@ def test_workflow_template_service_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -467,7 +472,7 @@ def test_workflow_template_service_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -517,6 +522,24 @@ def test_create_workflow_template_from_dict(): test_create_workflow_template(request_type=dict) +def test_create_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workflow_template), "__call__" + ) as call: + client.create_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() + + @pytest.mark.asyncio async def test_create_workflow_template_async( transport: str = "grpc_asyncio", @@ -759,6 +782,24 @@ def test_get_workflow_template_from_dict(): test_get_workflow_template(request_type=dict) +def test_get_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workflow_template), "__call__" + ) as call: + client.get_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.GetWorkflowTemplateRequest() + + @pytest.mark.asyncio async def test_get_workflow_template_async( transport: str = "grpc_asyncio", @@ -979,6 +1020,24 @@ def test_instantiate_workflow_template_from_dict(): test_instantiate_workflow_template(request_type=dict) +def test_instantiate_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_workflow_template), "__call__" + ) as call: + client.instantiate_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() + + @pytest.mark.asyncio async def test_instantiate_workflow_template_async( transport: str = "grpc_asyncio", @@ -1203,6 +1262,24 @@ def test_instantiate_inline_workflow_template_from_dict(): test_instantiate_inline_workflow_template(request_type=dict) +def test_instantiate_inline_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_inline_workflow_template), "__call__" + ) as call: + client.instantiate_inline_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() + + @pytest.mark.asyncio async def test_instantiate_inline_workflow_template_async( transport: str = "grpc_asyncio", @@ -1438,6 +1515,24 @@ def test_update_workflow_template_from_dict(): test_update_workflow_template(request_type=dict) +def test_update_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workflow_template), "__call__" + ) as call: + client.update_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() + + @pytest.mark.asyncio async def test_update_workflow_template_async( transport: str = "grpc_asyncio", @@ -1673,6 +1768,24 @@ def test_list_workflow_templates_from_dict(): test_list_workflow_templates(request_type=dict) +def test_list_workflow_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workflow_templates), "__call__" + ) as call: + client.list_workflow_templates() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.ListWorkflowTemplatesRequest() + + @pytest.mark.asyncio async def test_list_workflow_templates_async( transport: str = "grpc_asyncio", @@ -2069,6 +2182,24 @@ def test_delete_workflow_template_from_dict(): test_delete_workflow_template(request_type=dict) +def test_delete_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workflow_template), "__call__" + ) as call: + client.delete_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest() + + @pytest.mark.asyncio async def test_delete_workflow_template_async( transport: str = "grpc_asyncio", @@ -2414,6 +2545,53 @@ def test_workflow_template_service_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + ], +) +def test_workflow_template_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_workflow_template_service_host_no_port(): client = WorkflowTemplateServiceClient( credentials=credentials.AnonymousCredentials(), @@ -2435,7 +2613,7 @@ def test_workflow_template_service_host_with_port(): def test_workflow_template_service_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.WorkflowTemplateServiceGrpcTransport( @@ -2447,7 +2625,7 @@ def test_workflow_template_service_grpc_transport_channel(): def test_workflow_template_service_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.WorkflowTemplateServiceGrpcAsyncIOTransport( @@ -2458,6 +2636,8 @@ def test_workflow_template_service_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -2472,7 +2652,7 @@ def test_workflow_template_service_transport_channel_mtls_with_client_cert_sourc "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -2510,6 +2690,8 @@ def test_workflow_template_service_transport_channel_mtls_with_client_cert_sourc assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -2525,7 +2707,7 @@ def test_workflow_template_service_transport_channel_mtls_with_adc(transport_cla ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel
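
One recurring change in this diff is worth calling out: the transport-channel tests now build secure channels with local credentials instead of insecure channels. A standalone sketch (the target address is illustrative):

    import grpc

    # grpc.local_channel_credentials() produces credentials valid only for
    # local connections (local TCP or Unix domain sockets), which lets tests
    # exercise the secure-channel code path without real TLS certificates.
    channel = grpc.secure_channel("localhost:50051", grpc.local_channel_credentials())
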